Merge origin/main with local UI refactor integration

This commit is contained in:
2026-02-25 00:38:19 -05:00
42 changed files with 4886 additions and 188 deletions

View File

@@ -2,9 +2,14 @@ import { randomUUID } from "node:crypto";
import type { JsonObject } from "./types.js";
export type PlanningDomainEventType = "requirements_defined" | "tasks_planned";
export type ExecutionDomainEventType = "code_committed" | "task_blocked";
export type ExecutionDomainEventType = "code_committed" | "task_blocked" | "task_ready_for_review";
export type ValidationDomainEventType = "validation_passed" | "validation_failed";
export type IntegrationDomainEventType = "branch_merged";
export type IntegrationDomainEventType =
| "branch_merged"
| "merge_conflict_detected"
| "merge_conflict_resolved"
| "merge_conflict_unresolved"
| "merge_retry_started";
export type DomainEventType =
| PlanningDomainEventType
@@ -46,9 +51,14 @@ const DOMAIN_EVENT_TYPES = new Set<DomainEventType>([
"tasks_planned",
"code_committed",
"task_blocked",
"task_ready_for_review",
"validation_passed",
"validation_failed",
"branch_merged",
"merge_conflict_detected",
"merge_conflict_resolved",
"merge_conflict_unresolved",
"merge_retry_started",
]);
export function isDomainEventType(value: string): value is DomainEventType {

View File

@@ -50,10 +50,14 @@ function toNodeAttemptSeverity(status: ActorResultStatus): RuntimeEventSeverity
}
function toDomainEventSeverity(type: DomainEventType): RuntimeEventSeverity {
if (type === "task_blocked") {
if (type === "task_blocked" || type === "merge_conflict_unresolved") {
return "critical";
}
if (type === "validation_failed") {
if (
type === "validation_failed" ||
type === "merge_conflict_detected" ||
type === "merge_retry_started"
) {
return "warning";
}
return "info";

View File

@@ -2,6 +2,7 @@ import { resolve } from "node:path";
import { getConfig, loadConfig, type AppConfig } from "../config.js";
import { createDefaultMcpRegistry, loadMcpConfigFromEnv, McpRegistry } from "../mcp.js";
import { parseAgentManifest, type AgentManifest } from "./manifest.js";
import type { DomainEventEmission } from "./domain-events.js";
import { AgentManager } from "./manager.js";
import {
PersonaRegistry,
@@ -13,12 +14,19 @@ import {
type ActorExecutionSecurityContext,
type ActorExecutor,
type PipelineRunSummary,
type TaskExecutionLifecycle,
} from "./pipeline.js";
import { FileSystemProjectContextStore } from "./project-context.js";
import {
FileSystemProjectContextStore,
type ProjectTask,
type ProjectTaskStatus,
} from "./project-context.js";
import { FileSystemStateContextManager, type StoredSessionState } from "./state-context.js";
import type { JsonObject } from "./types.js";
import { SessionWorktreeManager, type SessionMetadata } from "./session-lifecycle.js";
import {
SecureCommandExecutor,
type SecurityViolationHandling,
type SecurityAuditEvent,
type SecurityAuditSink,
SecurityRulesEngine,
@@ -38,7 +46,8 @@ export type OrchestrationSettings = {
maxDepth: number;
maxRetries: number;
maxChildren: number;
securityViolationHandling: "hard_abort" | "validation_fail";
mergeConflictMaxAttempts: number;
securityViolationHandling: SecurityViolationHandling;
runtimeContext: Record<string, string | number | boolean>;
};
@@ -56,6 +65,7 @@ export function loadOrchestrationSettingsFromEnv(
maxDepth: config.orchestration.maxDepth,
maxRetries: config.orchestration.maxRetries,
maxChildren: config.orchestration.maxChildren,
mergeConflictMaxAttempts: config.orchestration.mergeConflictMaxAttempts,
securityViolationHandling: config.security.violationHandling,
};
}
@@ -181,6 +191,9 @@ function createActorSecurityContext(input: {
type: `security.${event.type}`,
severity: mapSecurityAuditSeverity(event),
message: toSecurityAuditMessage(event),
...(event.sessionId ? { sessionId: event.sessionId } : {}),
...(event.nodeId ? { nodeId: event.nodeId } : {}),
...(typeof event.attempt === "number" ? { attempt: event.attempt } : {}),
metadata: toSecurityAuditMetadata(event),
});
};
@@ -199,6 +212,9 @@ function createActorSecurityContext(input: {
blockedEnvAssignments: ["AGENT_STATE_ROOT", "AGENT_PROJECT_CONTEXT_PATH"],
},
auditSink,
{
violationHandling: input.settings.securityViolationHandling,
},
);
return {
@@ -221,6 +237,57 @@ function createActorSecurityContext(input: {
};
}
/**
 * Builds the absolute path of a session's project-context file:
 * `<stateRoot>/<sessionId>/project-context.json`.
 */
function resolveSessionProjectContextPath(stateRoot: string, sessionId: string): string {
  const segments = [stateRoot, sessionId, "project-context.json"];
  return resolve(...segments);
}
/**
 * Extracts a task identifier from a handoff payload, checking the
 * `taskId`, `task_id`, and `task` keys in that order. Returns the
 * trimmed first non-blank string match, or `fallback` when none exists.
 */
function readTaskIdFromPayload(payload: JsonObject, fallback: string): string {
  const match = [payload.taskId, payload.task_id, payload.task].find(
    (value) => typeof value === "string" && value.trim().length > 0,
  );
  return typeof match === "string" ? match.trim() : fallback;
}
/**
 * Maps an unsuccessful execution result to the task status to persist.
 * Hard failures always become "failed"; validation failures keep
 * conflict-related tasks in "conflict", otherwise stay "in_progress".
 */
function toTaskStatusForFailure(
  resultStatus: "validation_fail" | "failure",
  statusAtStart: string,
): ProjectTaskStatus {
  if (resultStatus === "failure") {
    return "failed";
  }
  const wasConflicting =
    statusAtStart === "conflict" || statusAtStart === "resolving_conflict";
  return wasConflicting ? "conflict" : "in_progress";
}
/**
 * A merge into the session base branch is attempted only for tasks that
 * started in "review" or "resolving_conflict".
 */
function shouldMergeFromStatus(statusAtStart: string): boolean {
  const mergeTriggerStatuses: readonly string[] = ["review", "resolving_conflict"];
  return mergeTriggerStatuses.includes(statusAtStart);
}
/**
 * Returns a display/lookup label for a task: `taskId` when non-empty,
 * then `id`, then the literal "task".
 */
function toTaskIdLabel(task: ProjectTask): string {
  if (task.taskId) {
    return task.taskId;
  }
  if (task.id) {
    return task.id;
  }
  return "task";
}
/**
 * Narrows an arbitrary value to a plain JSON object. Arrays, null, and
 * non-object values yield `undefined`.
 */
function toJsonObject(value: unknown): JsonObject | undefined {
  const isPlainObject =
    value !== null && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject) {
    return undefined;
  }
  return value as JsonObject;
}
/**
 * Reads the persisted merge-conflict attempt counter from task metadata
 * (`metadata.mergeConflict.attempts`). Returns 0 for anything that is
 * missing, non-integer, or negative.
 */
function readMergeConflictAttempts(metadata: JsonObject | undefined): number {
  const conflictRecord = toJsonObject(metadata?.mergeConflict);
  const rawAttempts = conflictRecord?.attempts;
  if (typeof rawAttempts !== "number" || !Number.isInteger(rawAttempts) || rawAttempts < 0) {
    return 0;
  }
  return rawAttempts;
}
export class SchemaDrivenExecutionEngine {
private readonly manifest: AgentManifest;
private readonly personaRegistry = new PersonaRegistry();
@@ -234,6 +301,7 @@ export class SchemaDrivenExecutionEngine {
private readonly mcpRegistry: McpRegistry;
private readonly runtimeEventPublisher: RuntimeEventPublisher;
private readonly securityContext: ActorExecutionSecurityContext;
private readonly sessionWorktreeManager: SessionWorktreeManager;
constructor(input: {
manifest: AgentManifest | unknown;
@@ -260,6 +328,8 @@ export class SchemaDrivenExecutionEngine {
maxDepth: input.settings?.maxDepth ?? config.orchestration.maxDepth,
maxRetries: input.settings?.maxRetries ?? config.orchestration.maxRetries,
maxChildren: input.settings?.maxChildren ?? config.orchestration.maxChildren,
mergeConflictMaxAttempts:
input.settings?.mergeConflictMaxAttempts ?? config.orchestration.mergeConflictMaxAttempts,
securityViolationHandling:
input.settings?.securityViolationHandling ?? config.security.violationHandling,
runtimeContext: {
@@ -273,6 +343,11 @@ export class SchemaDrivenExecutionEngine {
this.projectContextStore = new FileSystemProjectContextStore({
filePath: this.settings.projectContextPath,
});
this.sessionWorktreeManager = new SessionWorktreeManager({
worktreeRoot: resolve(this.settings.workspaceRoot, this.config.provisioning.gitWorktree.rootDirectory),
baseRef: this.config.provisioning.gitWorktree.baseRef,
targetPath: this.config.provisioning.gitWorktree.targetPath,
});
this.actorExecutors = toExecutorMap(input.actorExecutors);
this.manager =
@@ -352,9 +427,26 @@ export class SchemaDrivenExecutionEngine {
initialPayload: JsonObject;
initialState?: Partial<StoredSessionState>;
signal?: AbortSignal;
sessionMetadata?: SessionMetadata;
}): Promise<PipelineRunSummary> {
const managerSessionId = `${input.sessionId}__pipeline`;
const managerSession = this.manager.createSession(managerSessionId);
const workspaceRoot = input.sessionMetadata
? this.sessionWorktreeManager.resolveWorkingDirectoryForWorktree(
input.sessionMetadata.baseWorkspacePath,
)
: this.settings.workspaceRoot;
const projectContextStore = input.sessionMetadata
? new FileSystemProjectContextStore({
filePath: resolveSessionProjectContextPath(this.settings.stateRoot, input.sessionId),
})
: this.projectContextStore;
const taskLifecycle = input.sessionMetadata
? this.createTaskExecutionLifecycle({
session: input.sessionMetadata,
projectContextStore,
})
: undefined;
const executor = new PipelineExecutor(
this.manifest,
@@ -362,25 +454,26 @@ export class SchemaDrivenExecutionEngine {
this.stateManager,
this.actorExecutors,
{
workspaceRoot: this.settings.workspaceRoot,
workspaceRoot,
runtimeContext: this.settings.runtimeContext,
defaultModelConstraint: this.config.provider.claudeModel,
resolvedExecutionSecurityConstraints: {
dropUid: this.config.security.dropUid !== undefined,
dropGid: this.config.security.dropGid !== undefined,
worktreePath: this.settings.workspaceRoot,
worktreePath: workspaceRoot,
violationMode: this.settings.securityViolationHandling,
},
maxDepth: Math.min(this.settings.maxDepth, this.manifest.topologyConstraints.maxDepth),
maxRetries: Math.min(this.settings.maxRetries, this.manifest.topologyConstraints.maxRetries),
manager: this.manager,
managerSessionId,
projectContextStore: this.projectContextStore,
resolveMcpConfig: ({ providerHint, prompt, toolClearance }) =>
projectContextStore,
resolveMcpConfig: ({ providerHint, prompt, toolClearance, workingDirectory }) =>
loadMcpConfigFromEnv(
{
providerHint,
prompt,
...(workingDirectory ? { workingDirectory } : {}),
},
{
config: this.config,
@@ -391,6 +484,7 @@ export class SchemaDrivenExecutionEngine {
securityViolationHandling: this.settings.securityViolationHandling,
securityContext: this.securityContext,
runtimeEventPublisher: this.runtimeEventPublisher,
...(taskLifecycle ? { taskLifecycle } : {}),
},
);
try {
@@ -405,6 +499,335 @@ export class SchemaDrivenExecutionEngine {
}
}
/**
 * Builds the per-session task lifecycle hooks used by the pipeline executor:
 * - prepareTaskExecution: resolves a task worktree and marks the task active.
 * - finalizeTaskExecution: persists the outcome (failure / conflict resolution /
 *   merge / review) and emits any domain events the actor did not emit itself.
 */
private createTaskExecutionLifecycle(input: {
session: SessionMetadata;
projectContextStore: FileSystemProjectContextStore;
}): TaskExecutionLifecycle {
return {
prepareTaskExecution: async ({ node, context }) => {
// Resolve the task id from the handoff payload, falling back to the node id.
const taskId = readTaskIdFromPayload(context.handoff.payload, node.id);
const projectContext = await input.projectContextStore.readState();
const existing = projectContext.taskQueue.find(
(task) => toTaskIdLabel(task) === taskId,
);
// Reuse a previously recorded worktree when the task already has one.
const ensured = await this.sessionWorktreeManager.ensureTaskWorktree({
sessionId: input.session.sessionId,
taskId,
baseWorkspacePath: input.session.baseWorkspacePath,
...(existing?.worktreePath ? { existingWorktreePath: existing.worktreePath } : {}),
});
// Tasks already in review/conflict flow keep that status; everything else
// (including unknown tasks) starts as "in_progress".
const statusAtStart: ProjectTaskStatus =
existing?.status === "review" ||
existing?.status === "conflict" ||
existing?.status === "resolving_conflict"
? existing.status
: "in_progress";
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId,
id: taskId,
status: statusAtStart,
worktreePath: ensured.taskWorktreePath,
...(existing?.title ? { title: existing.title } : { title: taskId }),
...(existing?.metadata ? { metadata: existing.metadata } : {}),
},
],
});
return {
taskId,
workingDirectory: ensured.taskWorkingDirectory,
worktreePath: ensured.taskWorktreePath,
statusAtStart,
...(existing?.metadata ? { metadata: existing.metadata } : {}),
};
},
finalizeTaskExecution: async ({ task, result, domainEvents }) => {
// Track event types already emitted by the actor so lifecycle-generated
// events never duplicate them.
const emittedTypes = new Set(domainEvents.map((event) => event.type));
const additionalEvents: DomainEventEmission[] = [];
const emitEvent = (
type: DomainEventEmission["type"],
payload?: DomainEventEmission["payload"],
): void => {
if (emittedTypes.has(type)) {
return;
}
emittedTypes.add(type);
additionalEvents.push(payload ? { type, payload } : { type });
};
// Unsuccessful attempts: persist the failure status and stop — no merge.
// NOTE(review): this upsert sets title to the task id, which looks like it
// would overwrite a pre-existing human-readable title — confirm intended.
if (result.status === "failure" || result.status === "validation_fail") {
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: toTaskStatusForFailure(result.status, task.statusAtStart),
worktreePath: task.worktreePath,
title: task.taskId,
...(task.metadata ? { metadata: task.metadata } : {}),
},
],
});
return;
}
// Success after a detected conflict: record the conflict as resolved and
// move to "resolving_conflict" so a later pass retries the merge.
if (task.statusAtStart === "conflict") {
const attempts = readMergeConflictAttempts(task.metadata);
const metadata: JsonObject = {
...(task.metadata ?? {}),
mergeConflict: {
attempts,
maxAttempts: this.settings.mergeConflictMaxAttempts,
status: "resolved",
resolvedAt: new Date().toISOString(),
},
};
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: "resolving_conflict",
worktreePath: task.worktreePath,
title: task.taskId,
metadata,
},
],
});
emitEvent("merge_conflict_resolved", {
summary: `Merge conflicts resolved for task "${task.taskId}".`,
details: {
taskId: task.taskId,
worktreePath: task.worktreePath,
attempts,
},
});
return {
additionalEvents,
handoffPayloadPatch: {
taskId: task.taskId,
worktreePath: task.worktreePath,
mergeConflictStatus: "resolved",
mergeConflictAttempts: attempts,
} as JsonObject,
};
}
// Tasks in "review" or "resolving_conflict" attempt a merge into the base branch.
if (shouldMergeFromStatus(task.statusAtStart)) {
const attemptsBeforeMerge = readMergeConflictAttempts(task.metadata);
if (task.statusAtStart === "resolving_conflict") {
emitEvent("merge_retry_started", {
summary: `Retrying merge for task "${task.taskId}".`,
details: {
taskId: task.taskId,
worktreePath: task.worktreePath,
nextAttempt: attemptsBeforeMerge + 1,
maxAttempts: this.settings.mergeConflictMaxAttempts,
},
});
}
const mergeOutcome = await this.sessionWorktreeManager.mergeTaskIntoBase({
taskId: task.taskId,
baseWorkspacePath: input.session.baseWorkspacePath,
taskWorktreePath: task.worktreePath,
});
// Merge outcome 1/3: clean merge — mark merged and record the attempt count.
// NOTE(review): unlike the other branches this upsert omits worktreePath —
// confirm whether dropping it after merge is intentional.
if (mergeOutcome.kind === "success") {
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: "merged",
title: task.taskId,
metadata: {
...(task.metadata ?? {}),
mergeConflict: {
attempts: attemptsBeforeMerge,
maxAttempts: this.settings.mergeConflictMaxAttempts,
status: "merged",
mergedAt: new Date().toISOString(),
},
},
},
],
});
emitEvent("branch_merged", {
summary: `Task "${task.taskId}" merged into session base branch.`,
details: {
taskId: task.taskId,
worktreePath: task.worktreePath,
},
});
return {
additionalEvents,
handoffPayloadPatch: {
taskId: task.taskId,
mergeStatus: "merged",
} as JsonObject,
};
}
// Merge outcome 2/3: conflict — bump the attempt counter; mark unresolved
// once attempts reach the configured maximum.
if (mergeOutcome.kind === "conflict") {
const attempts = attemptsBeforeMerge + 1;
const exhausted = attempts >= this.settings.mergeConflictMaxAttempts;
const metadata: JsonObject = {
...(task.metadata ?? {}),
mergeConflict: {
attempts,
maxAttempts: this.settings.mergeConflictMaxAttempts,
status: exhausted ? "unresolved" : "conflict",
conflictFiles: mergeOutcome.conflictFiles,
worktreePath: mergeOutcome.worktreePath,
detectedAt: new Date().toISOString(),
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
},
};
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: "conflict",
worktreePath: task.worktreePath,
title: task.taskId,
metadata,
},
],
});
emitEvent("merge_conflict_detected", {
summary: `Merge conflict detected for task "${task.taskId}".`,
details: {
taskId: task.taskId,
worktreePath: mergeOutcome.worktreePath,
conflictFiles: mergeOutcome.conflictFiles,
attempts,
maxAttempts: this.settings.mergeConflictMaxAttempts,
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
},
});
if (exhausted) {
emitEvent("merge_conflict_unresolved", {
summary:
`Merge conflict attempts exhausted for task "${task.taskId}" ` +
`(${String(attempts)}/${String(this.settings.mergeConflictMaxAttempts)}).`,
details: {
taskId: task.taskId,
worktreePath: mergeOutcome.worktreePath,
conflictFiles: mergeOutcome.conflictFiles,
attempts,
maxAttempts: this.settings.mergeConflictMaxAttempts,
},
});
}
return {
additionalEvents,
handoffPayloadPatch: {
taskId: task.taskId,
worktreePath: task.worktreePath,
mergeConflictStatus: exhausted ? "unresolved" : "conflict",
mergeConflictAttempts: attempts,
mergeConflictMaxAttempts: this.settings.mergeConflictMaxAttempts,
mergeConflictFiles: mergeOutcome.conflictFiles,
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
} as JsonObject,
};
}
// Merge outcome 3/3: fatal merge error — fail the task and block it.
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: "failed",
worktreePath: task.worktreePath,
title: task.taskId,
metadata: {
...(task.metadata ?? {}),
mergeConflict: {
attempts: attemptsBeforeMerge,
maxAttempts: this.settings.mergeConflictMaxAttempts,
status: "fatal_error",
error: mergeOutcome.error,
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
},
},
},
],
});
emitEvent("merge_conflict_unresolved", {
summary: `Fatal merge error for task "${task.taskId}".`,
details: {
taskId: task.taskId,
worktreePath: mergeOutcome.worktreePath,
error: mergeOutcome.error,
...(mergeOutcome.mergeBase ? { mergeBase: mergeOutcome.mergeBase } : {}),
},
});
emitEvent("task_blocked", {
summary: `Task "${task.taskId}" blocked due to fatal merge error.`,
details: {
taskId: task.taskId,
error: mergeOutcome.error,
},
});
return {
additionalEvents,
handoffPayloadPatch: {
taskId: task.taskId,
worktreePath: task.worktreePath,
mergeStatus: "fatal_error",
mergeError: mergeOutcome.error,
} as JsonObject,
};
}
// Default path: successful first pass — move the task to "review" so a
// later run can merge it.
const nextMetadata = task.metadata
? {
...task.metadata,
}
: undefined;
await input.projectContextStore.patchState({
upsertTasks: [
{
taskId: task.taskId,
id: task.taskId,
status: "review",
worktreePath: task.worktreePath,
title: task.taskId,
...(nextMetadata ? { metadata: nextMetadata } : {}),
},
],
});
if (additionalEvents.length > 0) {
return {
additionalEvents,
};
}
return;
},
};
}
private assertRelationshipConstraints(): void {
for (const [parent, edges] of this.childrenByParent.entries()) {
if (edges.length > this.settings.maxChildren) {

View File

@@ -63,6 +63,7 @@ export type ActorExecutionResult = {
export type ActorToolPermissionResult =
| {
behavior: "allow";
updatedInput?: Record<string, unknown>;
toolUseID?: string;
}
| {
@@ -107,6 +108,8 @@ export type ResolvedExecutionContext = {
export type ActorExecutionInput = {
sessionId: string;
attempt: number;
depth: number;
node: PipelineNode;
prompt: string;
context: NodeExecutionContext;
@@ -153,6 +156,7 @@ export type PipelineExecutorOptions = {
securityViolationHandling?: SecurityViolationHandling;
securityContext?: ActorExecutionSecurityContext;
runtimeEventPublisher?: RuntimeEventPublisher;
taskLifecycle?: TaskExecutionLifecycle;
};
export type ActorExecutionSecurityContext = {
@@ -166,6 +170,35 @@ export type ActorExecutionSecurityContext = {
}) => SecureCommandExecutor;
};
/**
 * Resolution produced when a task's execution environment has been prepared:
 * the task's identity, the directory the actor should run in, the git
 * worktree backing it, and the task's status when execution began.
 */
export type TaskExecutionResolution = {
taskId: string;
workingDirectory: string;
worktreePath: string;
statusAtStart: string;
// Arbitrary per-task metadata carried through from the project context.
metadata?: JsonObject;
};
/**
 * Hooks the pipeline executor calls around each node attempt when task
 * lifecycle management is enabled.
 */
export type TaskExecutionLifecycle = {
// Called before the actor runs; resolves the task worktree/working directory.
prepareTaskExecution: (input: {
sessionId: string;
node: PipelineNode;
context: NodeExecutionContext;
}) => Promise<TaskExecutionResolution>;
// Called after the actor finishes; may emit extra domain events and/or
// patch the handoff payload passed to the next node. A void return means
// "nothing to add".
finalizeTaskExecution: (input: {
sessionId: string;
node: PipelineNode;
task: TaskExecutionResolution;
result: ActorExecutionResult;
domainEvents: DomainEvent[];
}) => Promise<
| void
| {
additionalEvents?: DomainEventEmission[];
handoffPayloadPatch?: JsonObject;
}
>;
};
type QueueItem = {
nodeId: string;
depth: number;
@@ -612,9 +645,11 @@ export class PipelineExecutor {
globalFlags: { ...projectContext.globalFlags },
artifactPointers: { ...projectContext.artifactPointers },
taskQueue: projectContext.taskQueue.map((task) => ({
id: task.id,
title: task.title,
taskId: task.taskId,
id: task.id ?? task.taskId,
...(task.title ? { title: task.title } : {}),
status: task.status,
...(task.worktreePath ? { worktreePath: task.worktreePath } : {}),
...(task.assignee ? { assignee: task.assignee } : {}),
...(task.metadata ? { metadata: task.metadata } : {}),
})),
@@ -886,6 +921,13 @@ export class PipelineExecutor {
})();
const context = await this.stateManager.buildFreshNodeContext(sessionId, node.id);
const taskResolution = this.options.taskLifecycle
? await this.options.taskLifecycle.prepareTaskExecution({
sessionId,
node,
context,
})
: undefined;
const prompt = this.personaRegistry.renderSystemPrompt({
personaId: node.personaId,
runtimeContext: {
@@ -901,10 +943,13 @@ export class PipelineExecutor {
node,
toolClearance,
prompt,
worktreePathOverride: taskResolution?.workingDirectory,
});
const result = await this.invokeActorExecutor({
sessionId,
attempt,
depth: recursiveDepth,
node,
prompt,
context,
@@ -921,12 +966,50 @@ export class PipelineExecutor {
customEvents: result.events,
});
const topologyKind: NodeTopologyKind = node.topology?.kind ?? "sequential";
const payloadForNext = result.payload ?? context.handoff.payload;
let payloadForNext: JsonObject = {
...context.handoff.payload,
...(result.payload ?? {}),
...(taskResolution
? {
taskId: taskResolution.taskId,
workingDirectory: taskResolution.workingDirectory,
worktreePath: taskResolution.worktreePath,
}
: {}),
};
const shouldRetry =
result.status === "validation_fail" &&
this.shouldRetryValidation(node) &&
attempt <= maxRetriesForNode;
if (taskResolution && this.options.taskLifecycle) {
const finalization = await this.options.taskLifecycle.finalizeTaskExecution({
sessionId,
node,
task: taskResolution,
result,
domainEvents,
});
for (const eventEmission of finalization?.additionalEvents ?? []) {
domainEvents.push(
createDomainEvent({
type: eventEmission.type,
source: "pipeline",
sessionId,
nodeId: node.id,
attempt,
payload: eventEmission.payload,
}),
);
}
if (finalization?.handoffPayloadPatch) {
payloadForNext = {
...payloadForNext,
...finalization.handoffPayloadPatch,
};
}
}
await this.lifecycleObserver.onNodeAttempt({
sessionId,
node,
@@ -1021,6 +1104,8 @@ export class PipelineExecutor {
private async invokeActorExecutor(input: {
sessionId: string;
attempt: number;
depth: number;
node: PipelineNode;
prompt: string;
context: NodeExecutionContext;
@@ -1033,12 +1118,20 @@ export class PipelineExecutor {
return await input.executor({
sessionId: input.sessionId,
attempt: input.attempt,
depth: input.depth,
node: input.node,
prompt: input.prompt,
context: input.context,
signal: input.signal,
executionContext: input.executionContext,
mcp: this.buildActorMcpContext(input.executionContext, input.prompt),
mcp: this.buildActorMcpContext({
sessionId: input.sessionId,
nodeId: input.node.id,
attempt: input.attempt,
executionContext: input.executionContext,
prompt: input.prompt,
}),
security: this.securityContext,
});
} catch (error) {
@@ -1079,9 +1172,15 @@ export class PipelineExecutor {
node: PipelineNode;
toolClearance: ToolClearancePolicy;
prompt: string;
worktreePathOverride?: string;
}): ResolvedExecutionContext {
const normalizedToolClearance = parseToolClearancePolicy(input.toolClearance);
const toolUniverse = this.resolveAvailableToolsForAttempt(normalizedToolClearance, input.prompt);
const worktreePath = input.worktreePathOverride ?? this.options.resolvedExecutionSecurityConstraints.worktreePath;
const toolUniverse = this.resolveAvailableToolsForAttempt({
toolClearance: normalizedToolClearance,
prompt: input.prompt,
worktreePath,
});
const allowedTools = this.resolveAllowedToolsForAttempt({
toolClearance: normalizedToolClearance,
toolUniverse,
@@ -1097,6 +1196,7 @@ export class PipelineExecutor {
allowedTools,
security: {
...this.options.resolvedExecutionSecurityConstraints,
worktreePath,
},
};
}
@@ -1119,15 +1219,20 @@ export class PipelineExecutor {
return [];
}
private resolveAvailableToolsForAttempt(toolClearance: ToolClearancePolicy, prompt: string): string[] {
private resolveAvailableToolsForAttempt(input: {
toolClearance: ToolClearancePolicy;
prompt: string;
worktreePath: string;
}): string[] {
if (!this.options.resolveMcpConfig) {
return [];
}
const resolved = this.options.resolveMcpConfig({
providerHint: "codex",
prompt,
toolClearance,
prompt: input.prompt,
workingDirectory: input.worktreePath,
toolClearance: input.toolClearance,
});
const rawServers = resolved.codexConfig?.mcp_servers;
@@ -1147,10 +1252,14 @@ export class PipelineExecutor {
return dedupeStrings(tools);
}
private buildActorMcpContext(
executionContext: ResolvedExecutionContext,
prompt: string,
): ActorExecutionMcpContext {
private buildActorMcpContext(input: {
sessionId: string;
nodeId: string;
attempt: number;
executionContext: ResolvedExecutionContext;
prompt: string;
}): ActorExecutionMcpContext {
const { executionContext, prompt } = input;
const toolPolicy = toAllowedToolPolicy(executionContext.allowedTools);
const filterToolsForProvider = (tools: string[]): string[] => {
const deduped = dedupeStrings(tools);
@@ -1161,6 +1270,7 @@ export class PipelineExecutor {
? this.options.resolveMcpConfig({
providerHint: "both",
prompt,
workingDirectory: executionContext.security.worktreePath,
toolClearance: toolPolicy,
})
: {};
@@ -1169,7 +1279,12 @@ export class PipelineExecutor {
executionContext.allowedTools,
);
const resolveConfig = (context: McpLoadContext = {}): LoadedMcpConfig => {
if (context.providerHint === "codex") {
const withWorkingDirectory: McpLoadContext = {
...context,
...(context.workingDirectory ? {} : { workingDirectory: executionContext.security.worktreePath }),
};
if (withWorkingDirectory.providerHint === "codex") {
return {
...(resolvedConfig.codexConfig ? { codexConfig: cloneMcpConfig(resolvedConfig).codexConfig } : {}),
...(resolvedConfig.sourcePath ? { sourcePath: resolvedConfig.sourcePath } : {}),
@@ -1179,7 +1294,7 @@ export class PipelineExecutor {
};
}
if (context.providerHint === "claude") {
if (withWorkingDirectory.providerHint === "claude") {
return {
...(resolvedConfig.claudeMcpServers
? { claudeMcpServers: cloneMcpConfig(resolvedConfig).claudeMcpServers }
@@ -1195,7 +1310,13 @@ export class PipelineExecutor {
};
const createToolPermissionHandler = (): ActorToolPermissionHandler =>
this.createToolPermissionHandler(executionContext.allowedTools);
this.createToolPermissionHandler({
allowedTools: executionContext.allowedTools,
violationMode: executionContext.security.violationMode,
sessionId: input.sessionId,
nodeId: input.nodeId,
attempt: input.attempt,
});
return {
allowedTools: [...executionContext.allowedTools],
@@ -1207,13 +1328,24 @@ export class PipelineExecutor {
};
}
private createToolPermissionHandler(allowedTools: readonly string[]): ActorToolPermissionHandler {
const allowset = new Set(allowedTools);
const caseInsensitiveAllowLookup = buildCaseInsensitiveToolLookup(allowedTools);
private createToolPermissionHandler(input: {
allowedTools: readonly string[];
violationMode: SecurityViolationHandling;
sessionId: string;
nodeId: string;
attempt: number;
}): ActorToolPermissionHandler {
const allowset = new Set(input.allowedTools);
const caseInsensitiveAllowLookup = buildCaseInsensitiveToolLookup(input.allowedTools);
const rulesEngine = this.securityContext?.rulesEngine;
const toolPolicy = toAllowedToolPolicy(allowedTools);
const toolPolicy = toAllowedToolPolicy(input.allowedTools);
const toolAuditContext = {
sessionId: input.sessionId,
nodeId: input.nodeId,
attempt: input.attempt,
};
return async (toolName, _input, options) => {
return async (toolName, toolInput, options) => {
const toolUseID = options.toolUseID;
if (options.signal.aborted) {
return {
@@ -1231,10 +1363,28 @@ export class PipelineExecutor {
caseInsensitiveLookup: caseInsensitiveAllowLookup,
});
if (!allowMatch) {
rulesEngine?.assertToolInvocationAllowed({
tool: candidates[0] ?? toolName,
toolClearance: toolPolicy,
});
if (rulesEngine) {
try {
rulesEngine.assertToolInvocationAllowed({
tool: candidates[0] ?? toolName,
toolClearance: toolPolicy,
context: toolAuditContext,
});
} catch (error) {
if (
!(input.violationMode === "dangerous_warn_only" && error instanceof SecurityViolationError)
) {
throw error;
}
}
}
if (input.violationMode === "dangerous_warn_only") {
return {
behavior: "allow",
updatedInput: toolInput,
...(toolUseID ? { toolUseID } : {}),
};
}
return {
behavior: "deny",
message: `Tool "${toolName}" is not in the resolved execution allowlist.`,
@@ -1246,10 +1396,12 @@ export class PipelineExecutor {
rulesEngine?.assertToolInvocationAllowed({
tool: allowMatch,
toolClearance: toolPolicy,
context: toolAuditContext,
});
return {
behavior: "allow",
updatedInput: toolInput,
...(toolUseID ? { toolUseID } : {}),
};
};

View File

@@ -5,12 +5,23 @@ import { deepCloneJson, isRecord, type JsonObject, type JsonValue } from "./type
export const PROJECT_CONTEXT_SCHEMA_VERSION = 1;
export type ProjectTaskStatus = "pending" | "in_progress" | "blocked" | "done";
export type ProjectTaskStatus =
| "pending"
| "in_progress"
| "review"
| "conflict"
| "resolving_conflict"
| "merged"
| "failed"
| "blocked"
| "done";
export type ProjectTask = {
id: string;
title: string;
taskId: string;
id?: string;
title?: string;
status: ProjectTaskStatus;
worktreePath?: string;
assignee?: string;
metadata?: JsonObject;
};
@@ -52,7 +63,17 @@ function toJsonObject(value: unknown, label: string): JsonObject {
}
function toTaskStatus(value: unknown, label: string): ProjectTaskStatus {
if (value === "pending" || value === "in_progress" || value === "blocked" || value === "done") {
if (
value === "pending" ||
value === "in_progress" ||
value === "review" ||
value === "conflict" ||
value === "resolving_conflict" ||
value === "merged" ||
value === "failed" ||
value === "blocked" ||
value === "done"
) {
return value;
}
throw new Error(`${label} has unsupported status "${String(value)}".`);
@@ -68,10 +89,28 @@ function toProjectTask(value: unknown, label: string): ProjectTask {
throw new Error(`${label}.assignee must be a non-empty string when provided.`);
}
const taskIdCandidate = value.taskId ?? value.id;
const taskId = assertNonEmptyString(taskIdCandidate, `${label}.taskId`);
const titleRaw = value.title;
if (titleRaw !== undefined && (typeof titleRaw !== "string" || titleRaw.trim().length === 0)) {
throw new Error(`${label}.title must be a non-empty string when provided.`);
}
const worktreePathRaw = value.worktreePath;
if (
worktreePathRaw !== undefined &&
(typeof worktreePathRaw !== "string" || worktreePathRaw.trim().length === 0)
) {
throw new Error(`${label}.worktreePath must be a non-empty string when provided.`);
}
return {
id: assertNonEmptyString(value.id, `${label}.id`),
title: assertNonEmptyString(value.title, `${label}.title`),
taskId,
id: taskId,
...(typeof titleRaw === "string" ? { title: titleRaw.trim() } : {}),
status: toTaskStatus(value.status, `${label}.status`),
...(typeof worktreePathRaw === "string" ? { worktreePath: worktreePathRaw.trim() } : {}),
...(typeof assignee === "string" ? { assignee: assignee.trim() } : {}),
...(value.metadata !== undefined
? { metadata: toJsonObject(value.metadata, `${label}.metadata`) }
@@ -157,10 +196,10 @@ function mergeUpsertTasks(current: ProjectTask[], upserts: ProjectTask[]): Proje
const byId = new Map<string, ProjectTask>();
for (const task of current) {
byId.set(task.id, task);
byId.set(task.taskId, task);
}
for (const task of upserts) {
byId.set(task.id, task);
byId.set(task.taskId, task);
}
return [...byId.values()];

View File

@@ -9,14 +9,16 @@ import {
import { isDomainEventType, type DomainEventEmission } from "../agents/domain-events.js";
import type { ActorExecutionInput, ActorExecutionResult, ActorExecutor } from "../agents/pipeline.js";
import { isRecord, type JsonObject, type JsonValue } from "../agents/types.js";
import { createSessionContext, type SessionContext } from "../examples/session-context.js";
import { ClaudeObservabilityLogger } from "../ui/claude-observability.js";
import { z } from "zod";
export type RunProvider = "codex" | "claude";
export type ProviderRunRuntime = {
provider: RunProvider;
config: Readonly<AppConfig>;
sessionContext: SessionContext;
sharedEnv: Record<string, string>;
claudeObservability: ClaudeObservabilityLogger;
close: () => Promise<void>;
};
@@ -28,6 +30,16 @@ type ProviderUsage = {
costUsd?: number;
};
/**
 * Drops entries whose value is `undefined`, producing an environment map
 * safe to hand to a child process.
 */
function sanitizeEnv(input: Record<string, string | undefined>): Record<string, string> {
  const definedEntries = Object.entries(input).filter(
    (entry): entry is [string, string] => typeof entry[1] === "string",
  );
  return Object.fromEntries(definedEntries);
}
const ACTOR_RESPONSE_SCHEMA = {
type: "object",
additionalProperties: true,
@@ -72,10 +84,6 @@ const CLAUDE_OUTPUT_FORMAT = {
schema: ACTOR_RESPONSE_SCHEMA,
} as const;
const CLAUDE_PROVIDER_MAX_TURNS = 2;
import { z } from "zod";
const ActorResponseSchema = z.object({
status: z.enum(["success", "validation_fail", "failure"]),
payload: z.unknown().optional(),
@@ -85,7 +93,6 @@ const ActorResponseSchema = z.object({
failureKind: z.unknown().optional(),
failureCode: z.unknown().optional(),
});
function toErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
@@ -93,6 +100,23 @@ function toErrorMessage(error: unknown): string {
return String(error);
}
/**
 * Returns the directory a provider run should execute in: the per-task git
 * worktree carried on the actor's security context (never the primary checkout).
 */
export function resolveProviderWorkingDirectory(actorInput: ActorExecutionInput): string {
  return actorInput.executionContext.security.worktreePath;
}
/**
 * Builds the environment map for a provider run: the runtime's shared env,
 * optionally the Claude auth variables, and always AGENT_WORKTREE_PATH
 * pointing at the task worktree. Later assignments win, matching the
 * precedence of the original spread order.
 */
export function buildProviderRuntimeEnv(input: {
  runtime: ProviderRunRuntime;
  actorInput: ActorExecutionInput;
  includeClaudeAuth?: boolean;
}): Record<string, string> {
  const { runtime, actorInput, includeClaudeAuth } = input;
  const merged: Record<string, string | undefined> = { ...runtime.sharedEnv };
  if (includeClaudeAuth) {
    Object.assign(merged, buildClaudeAuthEnv(runtime.config.provider));
  }
  merged.AGENT_WORKTREE_PATH = resolveProviderWorkingDirectory(actorInput);
  return sanitizeEnv(merged);
}
function toJsonValue(value: unknown): JsonValue {
return JSON.parse(JSON.stringify(value)) as JsonValue;
}
@@ -338,7 +362,7 @@ function buildActorPrompt(input: ActorExecutionInput): string {
},
events: [
{
type: "requirements_defined | tasks_planned | code_committed | task_blocked | validation_passed | validation_failed | branch_merged",
type: "requirements_defined | tasks_planned | code_committed | task_ready_for_review | task_blocked | validation_passed | validation_failed | branch_merged | merge_conflict_detected | merge_conflict_resolved | merge_conflict_unresolved | merge_retry_started",
payload: {
summary: "optional",
details: {},
@@ -370,6 +394,7 @@ async function runCodexActor(input: {
const prompt = buildActorPrompt(actorInput);
const startedAt = Date.now();
const apiKey = resolveOpenAiApiKey(runtime.config.provider);
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
const codex = new Codex({
...(apiKey ? { apiKey } : {}),
@@ -379,20 +404,21 @@ async function runCodexActor(input: {
...(actorInput.mcp.resolvedConfig.codexConfig
? { config: actorInput.mcp.resolvedConfig.codexConfig }
: {}),
env: runtime.sessionContext.runtimeInjection.env,
env: buildProviderRuntimeEnv({
runtime,
actorInput,
}),
});
const thread = codex.startThread({
workingDirectory: runtime.sessionContext.runtimeInjection.workingDirectory,
workingDirectory,
skipGitRepoCheck: runtime.config.provider.codexSkipGitCheck,
});
const turn = await runtime.sessionContext.runInSession(() =>
thread.run(prompt, {
signal: actorInput.signal,
outputSchema: ACTOR_RESPONSE_SCHEMA,
}),
);
const turn = await thread.run(prompt, {
signal: actorInput.signal,
outputSchema: ACTOR_RESPONSE_SCHEMA,
});
const usage: ProviderUsage = {
...(turn.usage
@@ -421,11 +447,46 @@ type ClaudeTurnResult = {
usage: ProviderUsage;
};
/**
 * Projects the actor input down to the identifiers the Claude observability
 * logger tags every record with.
 */
function toClaudeTraceContext(actorInput: ActorExecutionInput): {
  sessionId: string;
  nodeId: string;
  attempt: number;
  depth: number;
} {
  const { sessionId, attempt, depth, node } = actorInput;
  return { sessionId, nodeId: node.id, attempt, depth };
}
/**
 * Serializes provider usage metrics into a JSON object, copying only the
 * fields that are actually numbers so absent metrics stay absent.
 */
function toProviderUsageJson(usage: ProviderUsage): JsonObject {
  const json: JsonObject = {};
  // Fixed order preserves the key layout of the original implementation.
  const numericFields = [
    ["tokenInput", usage.tokenInput],
    ["tokenOutput", usage.tokenOutput],
    ["tokenTotal", usage.tokenTotal],
    ["durationMs", usage.durationMs],
    ["costUsd", usage.costUsd],
  ] as const;
  for (const [key, value] of numericFields) {
    if (typeof value === "number") {
      json[key] = value;
    }
  }
  return json;
}
function buildClaudeOptions(input: {
runtime: ProviderRunRuntime;
actorInput: ActorExecutionInput;
}): Options {
const { runtime, actorInput } = input;
const workingDirectory = resolveProviderWorkingDirectory(actorInput);
const authOptionOverrides = runtime.config.provider.anthropicOauthToken
? { authToken: runtime.config.provider.anthropicOauthToken }
@@ -434,13 +495,15 @@ function buildClaudeOptions(input: {
return token ? { apiKey: token } : {};
})();
const runtimeEnv = {
...runtime.sessionContext.runtimeInjection.env,
...buildClaudeAuthEnv(runtime.config.provider),
};
const runtimeEnv = buildProviderRuntimeEnv({
runtime,
actorInput,
includeClaudeAuth: true,
});
const traceContext = toClaudeTraceContext(actorInput);
return {
maxTurns: CLAUDE_PROVIDER_MAX_TURNS,
maxTurns: runtime.config.provider.claudeMaxTurns,
...(runtime.config.provider.claudeModel
? { model: runtime.config.provider.claudeModel }
: {}),
@@ -452,8 +515,11 @@ function buildClaudeOptions(input: {
? { mcpServers: actorInput.mcp.resolvedConfig.claudeMcpServers as Options["mcpServers"] }
: {}),
canUseTool: actorInput.mcp.createClaudeCanUseTool(),
cwd: runtime.sessionContext.runtimeInjection.workingDirectory,
cwd: workingDirectory,
env: runtimeEnv,
...runtime.claudeObservability.toOptionOverrides({
context: traceContext,
}),
outputFormat: CLAUDE_OUTPUT_FORMAT,
};
}
@@ -463,10 +529,19 @@ async function runClaudeTurn(input: {
actorInput: ActorExecutionInput;
prompt: string;
}): Promise<ClaudeTurnResult> {
const traceContext = toClaudeTraceContext(input.actorInput);
const options = buildClaudeOptions({
runtime: input.runtime,
actorInput: input.actorInput,
});
input.runtime.claudeObservability.recordQueryStarted({
context: traceContext,
data: {
...(options.model ? { model: options.model } : {}),
maxTurns: options.maxTurns ?? input.runtime.config.provider.claudeMaxTurns,
...(typeof options.cwd === "string" ? { cwd: options.cwd } : {}),
},
});
const startedAt = Date.now();
const stream = query({
@@ -477,6 +552,7 @@ async function runClaudeTurn(input: {
let resultText = "";
let structuredOutput: unknown;
let usage: ProviderUsage = {};
let messageCount = 0;
const onAbort = (): void => {
stream.close();
@@ -486,6 +562,12 @@ async function runClaudeTurn(input: {
try {
for await (const message of stream as AsyncIterable<SDKMessage>) {
messageCount += 1;
input.runtime.claudeObservability.recordMessage({
context: traceContext,
message,
});
if (message.type !== "result") {
continue;
}
@@ -507,6 +589,12 @@ async function runClaudeTurn(input: {
costUsd: message.total_cost_usd,
};
}
} catch (error) {
input.runtime.claudeObservability.recordQueryError({
context: traceContext,
error,
});
throw error;
} finally {
input.actorInput.signal.removeEventListener("abort", onAbort);
stream.close();
@@ -517,9 +605,22 @@ async function runClaudeTurn(input: {
}
if (!resultText) {
throw new Error("Claude run completed without a final result.");
const error = new Error("Claude run completed without a final result.");
input.runtime.claudeObservability.recordQueryError({
context: traceContext,
error,
});
throw error;
}
input.runtime.claudeObservability.recordQueryCompleted({
context: traceContext,
data: {
messageCount,
usage: toProviderUsageJson(usage),
},
});
return {
text: resultText,
structuredOutput,
@@ -535,13 +636,11 @@ async function runClaudeActor(input: {
actorInput: ActorExecutionInput;
}): Promise<ActorExecutionResult> {
const prompt = buildActorPrompt(input.actorInput);
const turn = await input.runtime.sessionContext.runInSession(() =>
runClaudeTurn({
runtime: input.runtime,
actorInput: input.actorInput,
prompt,
}),
);
const turn = await runClaudeTurn({
runtime: input.runtime,
actorInput: input.actorInput,
prompt,
});
const parsed = parseActorExecutionResultFromModelOutput({
rawText: turn.text,
@@ -556,21 +655,21 @@ async function runClaudeActor(input: {
export async function createProviderRunRuntime(input: {
provider: RunProvider;
initialPrompt: string;
config: Readonly<AppConfig>;
observabilityRootPath?: string;
baseEnv?: Record<string, string | undefined>;
}): Promise<ProviderRunRuntime> {
const sessionContext = await createSessionContext(input.provider, {
prompt: input.initialPrompt,
config: input.config,
const claudeObservability = new ClaudeObservabilityLogger({
workspaceRoot: input.observabilityRootPath ?? process.cwd(),
config: input.config.provider.claudeObservability,
});
return {
provider: input.provider,
config: input.config,
sessionContext,
close: async () => {
await sessionContext.close();
},
sharedEnv: sanitizeEnv(input.baseEnv ?? process.env),
claudeObservability,
close: async () => claudeObservability.close(),
};
}

View File

@@ -197,9 +197,9 @@ export class ResourceProvisioningOrchestrator {
async provisionSession(input: {
sessionId: string;
resources: ResourceRequest[];
workspaceRoot?: string;
workspaceRoot: string;
}): Promise<ProvisionedResources> {
const workspaceRoot = resolve(input.workspaceRoot ?? process.cwd());
const workspaceRoot = resolve(input.workspaceRoot);
const hardConstraints: ProvisionedResourcesState["hardConstraints"] = [];
const releases: ProvisionedResourcesState["releases"] = [];
const env: Record<string, string> = {};

View File

@@ -0,0 +1,872 @@
import { execFile } from "node:child_process";
import { randomUUID } from "node:crypto";
import { mkdir, readFile, readdir, stat } from "node:fs/promises";
import { dirname, isAbsolute, resolve } from "node:path";
import { promisify } from "node:util";
import { withFileLock, writeUtf8FileAtomic } from "./file-persistence.js";
// Promisified execFile so git invocations below can be awaited.
const execFileAsync = promisify(execFile);
// Name of the metadata document stored inside each session's state directory.
const SESSION_METADATA_FILE_NAME = "session-metadata.json";
/** Lifecycle states a session can be in. */
export type SessionStatus = "active" | "suspended" | "closed" | "closed_with_conflicts";
/** Persisted description of a session and its git workspaces. */
export type SessionMetadata = {
  sessionId: string;
  // Absolute path to the primary project checkout.
  projectPath: string;
  sessionStatus: SessionStatus;
  // Absolute path to the session's shared base worktree.
  baseWorkspacePath: string;
  // ISO-8601 timestamps (see createSession/updateSession).
  createdAt: string;
  updatedAt: string;
};
/** Input for creating a new session. */
export type CreateSessionRequest = {
  projectPath: string;
};
/**
 * Result of merging a task worktree into the session base workspace.
 * Discriminated on `kind`: success, a merge conflict (with the files left in
 * conflicted state), or a non-conflict git failure.
 */
export type MergeTaskIntoBaseOutcome =
  | {
      kind: "success";
      taskId: string;
      worktreePath: string;
      baseWorkspacePath: string;
    }
  | {
      kind: "conflict";
      taskId: string;
      // The worktree where the conflict occurred (task or base side).
      worktreePath: string;
      baseWorkspacePath: string;
      conflictFiles: string[];
      // Common ancestor commit, when it could be determined.
      mergeBase?: string;
    }
  | {
      kind: "fatal_error";
      taskId: string;
      worktreePath: string;
      baseWorkspacePath: string;
      error: string;
      mergeBase?: string;
    };
/**
 * Result of closing a session, optionally merging the base workspace back
 * into the project checkout. Discriminated on `kind`.
 */
export type CloseSessionOutcome =
  | {
      kind: "success";
      sessionId: string;
      mergedToProject: boolean;
    }
  | {
      kind: "conflict";
      sessionId: string;
      worktreePath: string;
      conflictFiles: string[];
      mergeBase?: string;
      baseBranch?: string;
    }
  | {
      kind: "fatal_error";
      sessionId: string;
      error: string;
      baseBranch?: string;
      mergeBase?: string;
    };
// Captured outcome of a git subprocess (exit code plus trimmed streams).
type GitExecutionResult = {
  exitCode: number;
  stdout: string;
  stderr: string;
};
// One entry from `git worktree list --porcelain`.
type GitWorktreeRecord = {
  path: string;
  // Full ref (e.g. refs/heads/...) when the worktree has a branch checked out.
  branchRef?: string;
};
/** Normalizes an unknown thrown value into a human-readable message. */
function toErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/**
 * Validates that `path` is absolute and returns its resolved form.
 * @throws Error naming `label` when the path is relative.
 */
function assertAbsolutePath(path: string, label: string): string {
  if (isAbsolute(path)) {
    return resolve(path);
  }
  throw new Error(`${label} must be an absolute path.`);
}
/**
 * Resolves a worktree path and strips the macOS "/private" prefix from
 * /private/var/... paths so they compare equal to their /var/... aliases.
 */
function normalizeWorktreePath(path: string): string {
  const resolved = resolve(path);
  if (resolved.startsWith("/private/var/")) {
    return resolved.slice("/private".length);
  }
  return resolved;
}
/**
 * Validates that `value` is a string with non-whitespace content and returns
 * it trimmed. @throws Error naming `label` otherwise.
 */
function assertNonEmptyString(value: unknown, label: string): string {
  if (typeof value === "string") {
    const trimmed = value.trim();
    if (trimmed.length > 0) {
      return trimmed;
    }
  }
  throw new Error(`${label} must be a non-empty string.`);
}
/**
 * Narrows an unknown value to a SessionStatus.
 * @throws Error for any value outside the known status set.
 */
function toSessionStatus(value: unknown): SessionStatus {
  switch (value) {
    case "active":
    case "suspended":
    case "closed":
    case "closed_with_conflicts":
      return value;
    default:
      throw new Error(`Session status "${String(value)}" is not supported.`);
  }
}
/**
 * Parses and validates a raw (JSON-decoded) value into SessionMetadata.
 * Every field is checked; paths must be absolute and are resolved.
 * @throws Error when the shape or any field is invalid.
 */
function toSessionMetadata(value: unknown): SessionMetadata {
  const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
  if (!isPlainObject) {
    throw new Error("Session metadata file is malformed.");
  }
  const raw = value as Record<string, unknown>;
  const sessionId = assertNonEmptyString(raw.sessionId, "sessionId");
  const projectPath = assertAbsolutePath(
    assertNonEmptyString(raw.projectPath, "projectPath"),
    "projectPath",
  );
  const baseWorkspacePath = assertAbsolutePath(
    assertNonEmptyString(raw.baseWorkspacePath, "baseWorkspacePath"),
    "baseWorkspacePath",
  );
  return {
    sessionId,
    projectPath,
    baseWorkspacePath,
    sessionStatus: toSessionStatus(raw.sessionStatus),
    createdAt: assertNonEmptyString(raw.createdAt, "createdAt"),
    updatedAt: assertNonEmptyString(raw.updatedAt, "updatedAt"),
  };
}
/**
 * Runs a git command and returns trimmed stdout.
 * @throws Error with the command and git's stderr/stdout on non-zero exit.
 */
async function runGit(args: string[]): Promise<string> {
  const outcome = await runGitWithResult(args);
  if (outcome.exitCode === 0) {
    return outcome.stdout.trim();
  }
  const detail = outcome.stderr || outcome.stdout || "unknown git error";
  throw new Error(`git ${args.join(" ")} failed: ${detail}`);
}
/**
 * Runs a git command and captures its exit code and output streams instead
 * of throwing on non-zero exit. Only spawn-level failures (no numeric exit
 * code, e.g. git binary missing) are rethrown as errors.
 */
async function runGitWithResult(args: string[]): Promise<GitExecutionResult> {
  try {
    const { stdout, stderr } = await execFileAsync("git", args, {
      encoding: "utf8",
    });
    return { exitCode: 0, stdout: stdout.trim(), stderr: stderr.trim() };
  } catch (error) {
    const failure = error as {
      code?: number | string;
      stdout?: string;
      stderr?: string;
    };
    // execFile rejects with a numeric `code` when the process ran but exited
    // non-zero; anything else is a spawn failure and stays an exception.
    if (typeof failure.code !== "number") {
      throw new Error(`git ${args.join(" ")} failed: ${toErrorMessage(error)}`);
    }
    return {
      exitCode: failure.code,
      stdout: String(failure.stdout ?? "").trim(),
      stderr: String(failure.stderr ?? "").trim(),
    };
  }
}
/**
 * Returns true when `path` exists on disk; false only for ENOENT.
 * Other stat failures (e.g. permission errors) propagate.
 */
async function pathExists(path: string): Promise<boolean> {
  try {
    await stat(path);
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
      return false;
    }
    throw error;
  }
  return true;
}
/**
 * Converts an arbitrary identifier into a filesystem/branch-safe segment:
 * disallowed characters become hyphens, hyphen runs collapse, edge hyphens
 * are stripped. Falls back to `fallback` when nothing remains.
 */
function sanitizeSegment(value: string, fallback: string): string {
  let segment = value.trim();
  segment = segment.replace(/[^a-zA-Z0-9_-]/g, "-");
  segment = segment.replace(/-+/g, "-");
  segment = segment.replace(/^-+/, "").replace(/-+$/, "");
  return segment.length > 0 ? segment : fallback;
}
/** Formats a failed git result, preferring stderr, then stdout, for detail. */
function toGitFailureMessage(result: GitExecutionResult): string {
  let reason = result.stderr;
  if (!reason) {
    reason = result.stdout;
  }
  if (!reason) {
    reason = "unknown git error";
  }
  return `git command failed with exit code ${String(result.exitCode)}: ${reason}`;
}
/** Splits text into trimmed, non-empty lines. */
function toStringLines(value: string): string[] {
  const lines: string[] = [];
  for (const rawLine of value.split("\n")) {
    const line = rawLine.trim();
    if (line) {
      lines.push(line);
    }
  }
  return lines;
}
/**
 * Parses `git worktree list --porcelain` output into records. Entries are
 * delimited by blank lines; each begins with a "worktree <path>" line and may
 * carry a "branch <ref>" line. Other porcelain lines (HEAD, detached) are
 * ignored.
 */
function parseGitWorktreeRecords(value: string): GitWorktreeRecord[] {
  const records: GitWorktreeRecord[] = [];
  let pending: GitWorktreeRecord | undefined;
  const flush = (): void => {
    if (pending) {
      records.push(pending);
      pending = undefined;
    }
  };
  for (const line of value.split("\n")) {
    if (!line.trim()) {
      // Blank line terminates the current entry.
      flush();
      continue;
    }
    if (line.startsWith("worktree ")) {
      // A new entry also terminates any entry still open.
      flush();
      pending = { path: line.slice("worktree ".length).trim() };
      continue;
    }
    if (line.startsWith("branch ") && pending) {
      pending.branchRef = line.slice("branch ".length).trim();
    }
  }
  flush();
  return records;
}
/**
 * Persists session metadata as JSON files under a state root directory:
 * one directory per session, each containing session-metadata.json.
 * Writes go through a file lock plus an atomic write helper so concurrent
 * processes do not observe partial files.
 */
export class FileSystemSessionMetadataStore {
  // Resolved root directory holding one subdirectory per session.
  private readonly stateRoot: string;
  constructor(input: { stateRoot: string }) {
    this.stateRoot = resolve(input.stateRoot);
  }
  getStateRoot(): string {
    return this.stateRoot;
  }
  /** Directory dedicated to one session's state. */
  getSessionDirectory(sessionId: string): string {
    return resolve(this.stateRoot, sessionId);
  }
  /** Path of the session's metadata document. */
  getSessionMetadataPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), SESSION_METADATA_FILE_NAME);
  }
  /** Path of the session's project-context document (written elsewhere). */
  getSessionProjectContextPath(sessionId: string): string {
    return resolve(this.getSessionDirectory(sessionId), "project-context.json");
  }
  /**
   * Creates and persists a new active session. A caller-supplied sessionId is
   * honored when non-blank; otherwise a random UUID is generated.
   */
  async createSession(input: {
    projectPath: string;
    baseWorkspacePath: string;
    sessionId?: string;
  }): Promise<SessionMetadata> {
    const sessionId = input.sessionId?.trim() || randomUUID();
    const now = new Date().toISOString();
    const metadata: SessionMetadata = {
      sessionId,
      projectPath: assertAbsolutePath(input.projectPath, "projectPath"),
      baseWorkspacePath: assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath"),
      sessionStatus: "active",
      createdAt: now,
      updatedAt: now,
    };
    const sessionDirectory = this.getSessionDirectory(sessionId);
    await mkdir(sessionDirectory, { recursive: true });
    await this.writeSessionMetadata(metadata);
    return metadata;
  }
  /**
   * Reads and validates a session's metadata. Returns undefined when the
   * metadata file does not exist; malformed content throws (via
   * toSessionMetadata), as do non-ENOENT I/O errors.
   */
  async readSession(sessionId: string): Promise<SessionMetadata | undefined> {
    const metadataPath = this.getSessionMetadataPath(sessionId);
    try {
      const content = await readFile(metadataPath, "utf8");
      return toSessionMetadata(JSON.parse(content) as unknown);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return undefined;
      }
      throw error;
    }
  }
  /**
   * Lists all readable sessions, newest first (by createdAt). Directories
   * without valid metadata are skipped; a missing state root yields [].
   */
  async listSessions(): Promise<SessionMetadata[]> {
    try {
      const entries = await readdir(this.stateRoot, { withFileTypes: true });
      const sessions: SessionMetadata[] = [];
      for (const entry of entries) {
        if (!entry.isDirectory()) {
          continue;
        }
        const metadata = await this.readSession(entry.name);
        if (metadata) {
          sessions.push(metadata);
        }
      }
      sessions.sort((left, right) => right.createdAt.localeCompare(left.createdAt));
      return sessions;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        return [];
      }
      throw error;
    }
  }
  /**
   * Applies a partial update to an existing session and bumps updatedAt.
   * Falsy patch fields are ignored (they cannot be cleared through this API).
   * @throws Error when the session does not exist.
   */
  async updateSession(
    sessionId: string,
    patch: Partial<Pick<SessionMetadata, "projectPath" | "baseWorkspacePath" | "sessionStatus">>,
  ): Promise<SessionMetadata> {
    const current = await this.readSession(sessionId);
    if (!current) {
      throw new Error(`Session "${sessionId}" does not exist.`);
    }
    const next: SessionMetadata = {
      ...current,
      ...(patch.projectPath ? { projectPath: assertAbsolutePath(patch.projectPath, "projectPath") } : {}),
      ...(patch.baseWorkspacePath
        ? { baseWorkspacePath: assertAbsolutePath(patch.baseWorkspacePath, "baseWorkspacePath") }
        : {}),
      ...(patch.sessionStatus ? { sessionStatus: patch.sessionStatus } : {}),
      updatedAt: new Date().toISOString(),
    };
    await this.writeSessionMetadata(next);
    return next;
  }
  // Serializes the metadata under a sibling .lock file and writes it
  // atomically so readers never see a torn file.
  private async writeSessionMetadata(metadata: SessionMetadata): Promise<void> {
    const metadataPath = this.getSessionMetadataPath(metadata.sessionId);
    await mkdir(dirname(metadataPath), { recursive: true });
    await withFileLock(`${metadataPath}.lock`, async () => {
      await writeUtf8FileAtomic(metadataPath, `${JSON.stringify(metadata, null, 2)}\n`);
    });
  }
}
/**
 * Manages the git worktree topology for a session: one shared "base"
 * workspace created from `baseRef`, plus one worktree per task branched off
 * the base. Provides merge flows task → base and base → project, reporting
 * conflicts as data (not exceptions) so callers can drive resolution.
 *
 * Branch naming: `ai-ops/<session>/base` and `ai-ops/<session>/task/<task>`
 * (segments sanitized via sanitizeSegment).
 */
export class SessionWorktreeManager {
  // Absolute directory under which all session/task worktrees live.
  private readonly worktreeRoot: string;
  // Ref the base workspace is created from.
  private readonly baseRef: string;
  // Optional repo-relative subdirectory used as the working directory;
  // when set, task/base worktrees are sparse-checked-out to just that path.
  private readonly targetPath?: string;
  constructor(input: {
    worktreeRoot: string;
    baseRef: string;
    targetPath?: string;
  }) {
    this.worktreeRoot = assertAbsolutePath(input.worktreeRoot, "worktreeRoot");
    this.baseRef = assertNonEmptyString(input.baseRef, "baseRef");
    this.targetPath = normalizeWorktreeTargetPath(input.targetPath, "targetPath");
  }
  /** Filesystem location of the session's shared base worktree. */
  resolveBaseWorkspacePath(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return resolve(this.worktreeRoot, scoped, "base");
  }
  /** Filesystem location of a task's dedicated worktree. */
  resolveTaskWorktreePath(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return resolve(this.worktreeRoot, scopedSession, "tasks", scopedTask);
  }
  /**
   * Working directory inside a worktree: the configured targetPath subtree
   * when set, otherwise the worktree root itself.
   */
  resolveWorkingDirectoryForWorktree(worktreePath: string): string {
    const normalizedWorktreePath = assertAbsolutePath(worktreePath, "worktreePath");
    return this.targetPath ? resolve(normalizedWorktreePath, this.targetPath) : normalizedWorktreePath;
  }
  private resolveBaseBranchName(sessionId: string): string {
    const scoped = sanitizeSegment(sessionId, "session");
    return `ai-ops/${scoped}/base`;
  }
  private resolveTaskBranchName(sessionId: string, taskId: string): string {
    const scopedSession = sanitizeSegment(sessionId, "session");
    const scopedTask = sanitizeSegment(taskId, "task");
    return `ai-ops/${scopedSession}/task/${scopedTask}`;
  }
  /**
   * Creates (idempotently) the session's base worktree off `baseRef` in the
   * project's repository and applies the sparse-checkout target path if one
   * is configured. Existing base workspaces are left as-is.
   */
  async initializeSessionBaseWorkspace(input: {
    sessionId: string;
    projectPath: string;
    baseWorkspacePath: string;
  }): Promise<void> {
    const projectPath = assertAbsolutePath(input.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    await mkdir(dirname(baseWorkspacePath), { recursive: true });
    if (!(await pathExists(baseWorkspacePath))) {
      // Anchor the worktree at the repo root so nested projectPath values work.
      const repoRoot = await runGit(["-C", projectPath, "rev-parse", "--show-toplevel"]);
      const branchName = this.resolveBaseBranchName(input.sessionId);
      await runGit(["-C", repoRoot, "worktree", "add", "-B", branchName, baseWorkspacePath, this.baseRef]);
    }
    await this.ensureWorktreeTargetPath(baseWorkspacePath);
  }
  /**
   * Ensures a task has a worktree on its task branch, creating it from the
   * base workspace's HEAD when missing. Idempotent: an existing, correctly
   * attached worktree is reused.
   * @throws Error when the task branch is already attached to a different
   *         worktree path than expected.
   */
  async ensureTaskWorktree(input: {
    sessionId: string;
    taskId: string;
    baseWorkspacePath: string;
    existingWorktreePath?: string;
  }): Promise<{
    taskWorktreePath: string;
    taskWorkingDirectory: string;
  }> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const maybeExisting = input.existingWorktreePath?.trim();
    const worktreePath = maybeExisting
      ? assertAbsolutePath(maybeExisting, "existingWorktreePath")
      : this.resolveTaskWorktreePath(input.sessionId, input.taskId);
    const branchName = this.resolveTaskBranchName(input.sessionId, input.taskId);
    const attachedWorktree = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
    // Compare normalized paths so macOS /private/var aliases match.
    const normalizedWorktreePath = normalizeWorktreePath(worktreePath);
    const normalizedAttachedWorktree = attachedWorktree ? normalizeWorktreePath(attachedWorktree) : undefined;
    if (normalizedAttachedWorktree && normalizedAttachedWorktree !== normalizedWorktreePath) {
      throw new Error(
        `Task branch "${branchName}" is already attached to worktree "${attachedWorktree}", ` +
        `expected "${worktreePath}".`,
      );
    }
    if (!(await pathExists(worktreePath))) {
      // Drop stale registrations whose directories were deleted out-of-band,
      // otherwise the subsequent add can fail on a phantom worktree.
      await runGit(["-C", baseWorkspacePath, "worktree", "prune", "--expire", "now"]);
    }
    if (!(await pathExists(worktreePath))) {
      await mkdir(dirname(worktreePath), { recursive: true });
      const addResult = await runGitWithResult([
        "-C",
        baseWorkspacePath,
        "worktree",
        "add",
        "-B",
        branchName,
        worktreePath,
        "HEAD",
      ]);
      if (addResult.exitCode !== 0) {
        // A concurrent creator may have won the race; accept the worktree if
        // it now exists and is attached where we expected.
        const attachedAfterFailure = await this.findWorktreePathForBranch(baseWorkspacePath, branchName);
        if (
          attachedAfterFailure &&
          normalizeWorktreePath(attachedAfterFailure) === normalizedWorktreePath &&
          (await pathExists(worktreePath))
        ) {
          const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
          return {
            taskWorktreePath: worktreePath,
            taskWorkingDirectory,
          };
        }
        throw new Error(
          `git -C ${baseWorkspacePath} worktree add -B ${branchName} ${worktreePath} HEAD failed: ` +
          `${toGitFailureMessage(addResult)}`,
        );
      }
    }
    const taskWorkingDirectory = await this.ensureWorktreeTargetPath(worktreePath);
    return {
      taskWorktreePath: worktreePath,
      taskWorkingDirectory,
    };
  }
  /**
   * Merges a task's work into the session base. Steps: commit any pending
   * changes in the task worktree, sync the base branch INTO the task branch
   * first (so conflicts surface on the task side), then merge the task branch
   * into the base. On full success the task worktree is removed.
   * Conflicts (git exit code 1, or a pre-existing MERGE_HEAD) and other git
   * failures are returned as outcomes, never thrown.
   */
  async mergeTaskIntoBase(input: {
    taskId: string;
    baseWorkspacePath: string;
    taskWorktreePath: string;
  }): Promise<MergeTaskIntoBaseOutcome> {
    const baseWorkspacePath = assertAbsolutePath(input.baseWorkspacePath, "baseWorkspacePath");
    const taskWorktreePath = assertAbsolutePath(input.taskWorktreePath, "taskWorktreePath");
    const taskId = input.taskId;
    if (!(await pathExists(baseWorkspacePath))) {
      throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
    }
    if (!(await pathExists(taskWorktreePath))) {
      throw new Error(`Task worktree "${taskWorktreePath}" does not exist.`);
    }
    let mergeBase: string | undefined;
    try {
      // Stage and commit whatever the task left uncommitted.
      await runGit(["-C", taskWorktreePath, "add", "-A"]);
      const hasPending = await this.hasStagedChanges(taskWorktreePath);
      if (hasPending) {
        await runGit(["-C", taskWorktreePath, "commit", "-m", `ai_ops: finalize task ${taskId}`]);
      }
      const branchName = await runGit(["-C", taskWorktreePath, "rev-parse", "--abbrev-ref", "HEAD"]);
      const baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
      mergeBase = await this.tryReadMergeBase(baseWorkspacePath, baseBranch, branchName);
      // A merge already in progress (e.g. from an earlier conflicted attempt)
      // is reported as a conflict rather than compounded.
      if (await this.hasOngoingMerge(taskWorktreePath)) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(taskWorktreePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      const syncTaskBranch = await runGitWithResult([
        "-C",
        taskWorktreePath,
        "merge",
        "--no-ff",
        "--no-edit",
        baseBranch,
      ]);
      // git merge exits 1 on conflicts; other non-zero codes are fatal.
      if (syncTaskBranch.exitCode === 1) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(taskWorktreePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      if (syncTaskBranch.exitCode !== 0) {
        return {
          kind: "fatal_error",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          error: toGitFailureMessage(syncTaskBranch),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      if (await this.hasOngoingMerge(baseWorkspacePath)) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: baseWorkspacePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(baseWorkspacePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      const mergeIntoBase = await runGitWithResult([
        "-C",
        baseWorkspacePath,
        "merge",
        "--no-ff",
        "--no-edit",
        branchName,
      ]);
      if (mergeIntoBase.exitCode === 1) {
        return {
          kind: "conflict",
          taskId,
          worktreePath: baseWorkspacePath,
          baseWorkspacePath,
          conflictFiles: await this.readConflictFiles(baseWorkspacePath),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      if (mergeIntoBase.exitCode !== 0) {
        return {
          kind: "fatal_error",
          taskId,
          worktreePath: taskWorktreePath,
          baseWorkspacePath,
          error: toGitFailureMessage(mergeIntoBase),
          ...(mergeBase ? { mergeBase } : {}),
        };
      }
      // Merge landed; the task worktree is no longer needed.
      await this.removeWorktree({
        repoPath: baseWorkspacePath,
        worktreePath: taskWorktreePath,
      });
      return {
        kind: "success",
        taskId,
        worktreePath: taskWorktreePath,
        baseWorkspacePath,
      };
    } catch (error) {
      return {
        kind: "fatal_error",
        taskId,
        worktreePath: taskWorktreePath,
        baseWorkspacePath,
        error: toErrorMessage(error),
        ...(mergeBase ? { mergeBase } : {}),
      };
    }
  }
  /**
   * Tears down a session: removes every task worktree, optionally merges the
   * base branch into the project checkout, then removes the base worktree.
   * Conflicts in the project merge and git failures come back as outcomes.
   */
  async closeSession(input: {
    session: SessionMetadata;
    taskWorktreePaths: string[];
    mergeBaseIntoProject?: boolean;
  }): Promise<CloseSessionOutcome> {
    const projectPath = assertAbsolutePath(input.session.projectPath, "projectPath");
    const baseWorkspacePath = assertAbsolutePath(input.session.baseWorkspacePath, "baseWorkspacePath");
    if (!(await pathExists(projectPath))) {
      throw new Error(`Project path "${projectPath}" does not exist.`);
    }
    if (!(await pathExists(baseWorkspacePath))) {
      throw new Error(`Base workspace "${baseWorkspacePath}" does not exist.`);
    }
    let baseBranch: string | undefined;
    let mergeBase: string | undefined;
    try {
      for (const taskWorktreePath of input.taskWorktreePaths) {
        if (!taskWorktreePath.trim()) {
          continue;
        }
        await this.removeWorktree({
          repoPath: baseWorkspacePath,
          worktreePath: taskWorktreePath,
        });
      }
      if (input.mergeBaseIntoProject) {
        baseBranch = await runGit(["-C", baseWorkspacePath, "rev-parse", "--abbrev-ref", "HEAD"]);
        mergeBase = await this.tryReadMergeBase(projectPath, "HEAD", baseBranch);
        // An unfinished merge in the project checkout blocks the close.
        if (await this.hasOngoingMerge(projectPath)) {
          return {
            kind: "conflict",
            sessionId: input.session.sessionId,
            worktreePath: projectPath,
            conflictFiles: await this.readConflictFiles(projectPath),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }
        const mergeResult = await runGitWithResult([
          "-C",
          projectPath,
          "merge",
          "--no-ff",
          "--no-edit",
          baseBranch,
        ]);
        // Exit code 1 = conflicts, other non-zero = fatal (same as task merge).
        if (mergeResult.exitCode === 1) {
          return {
            kind: "conflict",
            sessionId: input.session.sessionId,
            worktreePath: projectPath,
            conflictFiles: await this.readConflictFiles(projectPath),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }
        if (mergeResult.exitCode !== 0) {
          return {
            kind: "fatal_error",
            sessionId: input.session.sessionId,
            error: toGitFailureMessage(mergeResult),
            ...(baseBranch ? { baseBranch } : {}),
            ...(mergeBase ? { mergeBase } : {}),
          };
        }
      }
      await this.removeWorktree({
        repoPath: projectPath,
        worktreePath: baseWorkspacePath,
      });
      return {
        kind: "success",
        sessionId: input.session.sessionId,
        mergedToProject: input.mergeBaseIntoProject === true,
      };
    } catch (error) {
      return {
        kind: "fatal_error",
        sessionId: input.session.sessionId,
        error: toErrorMessage(error),
        ...(baseBranch ? { baseBranch } : {}),
        ...(mergeBase ? { mergeBase } : {}),
      };
    }
  }
  // Force-removes a worktree (no-op when the path is already gone) and prunes
  // the repo's worktree registrations.
  private async removeWorktree(input: {
    repoPath: string;
    worktreePath: string;
  }): Promise<void> {
    if (!(await pathExists(input.worktreePath))) {
      return;
    }
    await runGit(["-C", input.repoPath, "worktree", "remove", "--force", input.worktreePath]);
    await runGit(["-C", input.repoPath, "worktree", "prune"]);
  }
  // True when the index differs from HEAD. `git diff --cached --quiet` exits
  // 1 when staged changes exist, 0 when clean; other failures are rethrown.
  private async hasStagedChanges(worktreePath: string): Promise<boolean> {
    try {
      await execFileAsync("git", ["-C", worktreePath, "diff", "--cached", "--quiet"], {
        encoding: "utf8",
      });
      return false;
    } catch (error) {
      const exitCode = (error as { code?: number }).code;
      if (exitCode === 1) {
        return true;
      }
      throw new Error(`Unable to inspect staged changes: ${toErrorMessage(error)}`);
    }
  }
  // True when MERGE_HEAD exists, i.e. a merge is in progress in the worktree.
  private async hasOngoingMerge(worktreePath: string): Promise<boolean> {
    const result = await runGitWithResult([
      "-C",
      worktreePath,
      "rev-parse",
      "-q",
      "--verify",
      "MERGE_HEAD",
    ]);
    return result.exitCode === 0;
  }
  // Paths of files currently in unmerged (conflicted) state; [] on any
  // git failure — callers treat this as best-effort detail.
  private async readConflictFiles(worktreePath: string): Promise<string[]> {
    const result = await runGitWithResult([
      "-C",
      worktreePath,
      "diff",
      "--name-only",
      "--diff-filter=U",
    ]);
    if (result.exitCode !== 0) {
      return [];
    }
    return toStringLines(result.stdout);
  }
  // Common ancestor of two refs, or undefined when git cannot determine one.
  private async tryReadMergeBase(
    repoPath: string,
    leftRef: string,
    rightRef: string,
  ): Promise<string | undefined> {
    const result = await runGitWithResult(["-C", repoPath, "merge-base", leftRef, rightRef]);
    if (result.exitCode !== 0) {
      return undefined;
    }
    const mergeBase = result.stdout.trim();
    return mergeBase || undefined;
  }
  // Worktree path currently holding `branchName` checked out, if any.
  private async findWorktreePathForBranch(
    repoPath: string,
    branchName: string,
  ): Promise<string | undefined> {
    const branchRef = `refs/heads/${branchName}`;
    const records = await this.listWorktreeRecords(repoPath);
    const matched = records.find((record) => record.branchRef === branchRef);
    if (!matched) {
      return undefined;
    }
    return resolve(matched.path);
  }
  // All worktrees registered with the repo; [] when the listing fails.
  private async listWorktreeRecords(repoPath: string): Promise<GitWorktreeRecord[]> {
    const result = await runGitWithResult(["-C", repoPath, "worktree", "list", "--porcelain"]);
    if (result.exitCode !== 0) {
      return [];
    }
    return parseGitWorktreeRecords(result.stdout);
  }
  // Applies sparse checkout for targetPath (when configured) and verifies the
  // resulting working directory exists and is a directory; returns it.
  private async ensureWorktreeTargetPath(worktreePath: string): Promise<string> {
    if (this.targetPath) {
      await runGit(["-C", worktreePath, "sparse-checkout", "init", "--cone"]);
      await runGit(["-C", worktreePath, "sparse-checkout", "set", this.targetPath]);
    }
    const workingDirectory = this.resolveWorkingDirectoryForWorktree(worktreePath);
    let workingDirectoryStats;
    try {
      workingDirectoryStats = await stat(workingDirectory);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        if (this.targetPath) {
          throw new Error(
            `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
          );
        }
        throw new Error(`Worktree path "${workingDirectory}" does not exist.`);
      }
      throw error;
    }
    if (!workingDirectoryStats.isDirectory()) {
      if (this.targetPath) {
        throw new Error(
          `Configured worktree target path "${this.targetPath}" is not a directory in ref "${this.baseRef}".`,
        );
      }
      throw new Error(`Worktree path "${workingDirectory}" is not a directory.`);
    }
    return workingDirectory;
  }
}
/**
 * Normalizes an optional repo-relative target path: backslashes become
 * slashes, empty/"." segments are dropped, and the cleaned segments are
 * rejoined. Returns undefined for blank/empty input.
 * @throws Error (naming `key`) for absolute paths (POSIX or drive-letter
 *         style) and for any ".." segment.
 */
function normalizeWorktreeTargetPath(value: string | undefined, key: string): string | undefined {
  const trimmed = value?.trim();
  if (!trimmed) {
    return undefined;
  }
  const unixStyle = trimmed.replace(/\\/g, "/");
  const looksAbsolute = isAbsolute(unixStyle) || /^[a-zA-Z]:\//.test(unixStyle);
  if (looksAbsolute) {
    throw new Error(`${key} must be a relative path within the repository worktree.`);
  }
  const segments: string[] = [];
  for (const rawSegment of unixStyle.split("/")) {
    const segment = rawSegment.trim();
    if (!segment || segment === ".") {
      continue;
    }
    if (segment === "..") {
      throw new Error(`${key} must not contain ".." path segments.`);
    }
    segments.push(segment);
  }
  return segments.length > 0 ? segments.join("/") : undefined;
}