feat(uok): enforce gates, port gsd2 modules, flip flags to on-by-default
- Wire plan-gate in runDispatch() and verification gate in runFinalize() - Add planningFlow gate persistence in guided-flow.ts - Add execution-graph gate event in auto-dispatch.ts - Flip all UOK feature flags from opt-in (=== true) to on-by-default (?? true) - Port dispatch-envelope.ts, parity-report.ts, writer.ts from gsd2 - Add DispatchReasonCode, UokDispatchEnvelope, WriterToken, WriteRecord, WriteSequence, DispatchExplanation to contracts.ts - Add "refine" to UokNodeKind - Extend auto-worktree.ts with workspace.after_create hook support - Add workspace.after_create to preferences-types and preferences-validation Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
0399bb9c8c
commit
851bd7fca3
16 changed files with 707 additions and 72 deletions
|
|
@ -67,6 +67,7 @@ import {
|
|||
import type { SFState } from "./types.js";
|
||||
import { selectReactiveDispatchBatch } from "./uok/execution-graph.js";
|
||||
import { resolveUokFlags } from "./uok/flags.js";
|
||||
import { UokGateRunner } from "./uok/gate-runner.js";
|
||||
import { EXECUTION_ENTRY_PHASES } from "./uok/plan-v2.js";
|
||||
import { extractVerdict, isAcceptableUatVerdict } from "./verdict-parser.js";
|
||||
import { logError, logWarning } from "./workflow-logger.js";
|
||||
|
|
@ -960,6 +961,7 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
// execution remains sequential
|
||||
if (maxParallel <= 1) return null;
|
||||
|
||||
const uokFlags = resolveUokFlags(prefs);
|
||||
try {
|
||||
const {
|
||||
loadSliceTaskIO,
|
||||
|
|
@ -984,8 +986,6 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
|
||||
// Only activate reactive dispatch when >1 task is ready
|
||||
if (readyIds.length <= 1) return null;
|
||||
|
||||
const uokFlags = resolveUokFlags(prefs);
|
||||
const selected = uokFlags.executionGraph
|
||||
? selectReactiveDispatchBatch({
|
||||
graph,
|
||||
|
|
@ -1033,9 +1033,36 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
};
|
||||
} catch (err) {
|
||||
// Non-fatal — fall through to sequential execution
|
||||
const errMsg = (err as Error).message;
|
||||
logError("dispatch", "reactive graph derivation failed", {
|
||||
error: (err as Error).message,
|
||||
error: errMsg,
|
||||
});
|
||||
// Persist execution-graph failure to gate audit when gates are enabled
|
||||
if (uokFlags.executionGraph && uokFlags.gates) {
|
||||
const egRunner = new UokGateRunner();
|
||||
egRunner.register({
|
||||
id: "execution-graph-gate",
|
||||
type: "execution",
|
||||
execute: async () => ({
|
||||
outcome: "fail" as const,
|
||||
failureClass: "execution" as const,
|
||||
rationale: "reactive graph derivation failed — falling back to sequential",
|
||||
findings: errMsg,
|
||||
}),
|
||||
});
|
||||
egRunner
|
||||
.run("execution-graph-gate", {
|
||||
basePath,
|
||||
traceId: `dispatch:${mid}/${sid}`,
|
||||
turnId: `${mid}/${sid}`,
|
||||
milestoneId: mid,
|
||||
sliceId: sid,
|
||||
unitType: "reactive-execute",
|
||||
})
|
||||
.catch(() => {
|
||||
/* gate telemetry must never block dispatch */
|
||||
});
|
||||
}
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
|
|
|||
|
|
@ -983,6 +983,7 @@ function syncMilestoneDir(
|
|||
* Failure is non-fatal — returns the error message or null on success.
|
||||
*
|
||||
* Reads the hook path from git.worktree_post_create in preferences.
|
||||
* Also runs workspace.after_create (inline shell script) if configured.
|
||||
* Pass hookPath directly to bypass preference loading (useful for testing).
|
||||
*/
|
||||
export function runWorktreePostCreateHook(
|
||||
|
|
@ -990,53 +991,85 @@ export function runWorktreePostCreateHook(
|
|||
worktreeDir: string,
|
||||
hookPath?: string,
|
||||
): string | null {
|
||||
if (hookPath === undefined) {
|
||||
const prefs = loadEffectiveSFPreferences()?.preferences?.git;
|
||||
hookPath = prefs?.worktree_post_create;
|
||||
}
|
||||
if (!hookPath) return null;
|
||||
const errors: string[] = [];
|
||||
|
||||
// Resolve relative paths against the source project root.
|
||||
// On Windows, convert 8.3 short paths (e.g. RUNNER~1) to long paths
|
||||
// so execFileSync can locate the file correctly.
|
||||
let resolved = isAbsolute(hookPath) ? hookPath : join(sourceDir, hookPath);
|
||||
if (!existsSync(resolved)) {
|
||||
return `Worktree post-create hook not found: ${resolved}`;
|
||||
// ── Legacy file-path hook (git.worktree_post_create) ─────────────────────
|
||||
let resolvedHookPath = hookPath;
|
||||
if (resolvedHookPath === undefined) {
|
||||
const prefs = loadEffectiveSFPreferences()?.preferences?.git;
|
||||
resolvedHookPath = prefs?.worktree_post_create;
|
||||
}
|
||||
if (process.platform === "win32") {
|
||||
try {
|
||||
resolved = realpathSync.native(resolved);
|
||||
} catch (err) {
|
||||
/* keep original */
|
||||
logWarning(
|
||||
"worktree",
|
||||
`realpath failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||
);
|
||||
if (resolvedHookPath) {
|
||||
// Resolve relative paths against the source project root.
|
||||
// On Windows, convert 8.3 short paths (e.g. RUNNER~1) to long paths
|
||||
// so execFileSync can locate the file correctly.
|
||||
let resolved = isAbsolute(resolvedHookPath)
|
||||
? resolvedHookPath
|
||||
: join(sourceDir, resolvedHookPath);
|
||||
if (!existsSync(resolved)) {
|
||||
errors.push(`Worktree post-create hook not found: ${resolved}`);
|
||||
} else {
|
||||
if (process.platform === "win32") {
|
||||
try {
|
||||
resolved = realpathSync.native(resolved);
|
||||
} catch (err) {
|
||||
/* keep original */
|
||||
logWarning(
|
||||
"worktree",
|
||||
`realpath failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// .bat/.cmd files on Windows require shell mode — execFileSync cannot
|
||||
// spawn them directly (EINVAL).
|
||||
const needsShell =
|
||||
process.platform === "win32" && /\.(bat|cmd)$/i.test(resolved);
|
||||
execFileSync(resolved, [], {
|
||||
cwd: worktreeDir,
|
||||
env: {
|
||||
...process.env,
|
||||
SOURCE_DIR: sourceDir,
|
||||
WORKTREE_DIR: worktreeDir,
|
||||
},
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
encoding: "utf-8",
|
||||
timeout: 30_000,
|
||||
shell: needsShell,
|
||||
});
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
errors.push(`Worktree post-create hook failed: ${msg}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// .bat/.cmd files on Windows require shell mode — execFileSync cannot
|
||||
// spawn them directly (EINVAL).
|
||||
const needsShell =
|
||||
process.platform === "win32" && /\.(bat|cmd)$/i.test(resolved);
|
||||
execFileSync(resolved, [], {
|
||||
cwd: worktreeDir,
|
||||
env: {
|
||||
...process.env,
|
||||
SOURCE_DIR: sourceDir,
|
||||
WORKTREE_DIR: worktreeDir,
|
||||
},
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
encoding: "utf-8",
|
||||
timeout: 30_000, // 30 second timeout
|
||||
shell: needsShell,
|
||||
});
|
||||
return null;
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
return `Worktree post-create hook failed: ${msg}`;
|
||||
// ── Inline script hook (workspace.after_create) ───────────────────────────
|
||||
// Only read from prefs when hookPath was not passed explicitly (testing path).
|
||||
if (hookPath === undefined) {
|
||||
const afterCreate =
|
||||
loadEffectiveSFPreferences()?.preferences?.workspace?.after_create;
|
||||
if (afterCreate) {
|
||||
try {
|
||||
execFileSync("sh", ["-c", afterCreate], {
|
||||
cwd: worktreeDir,
|
||||
env: {
|
||||
...process.env,
|
||||
SOURCE_DIR: sourceDir,
|
||||
WORKTREE_DIR: worktreeDir,
|
||||
},
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
encoding: "utf-8",
|
||||
timeout: 60_000,
|
||||
});
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
errors.push(`workspace.after_create hook failed: ${msg}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return errors.length > 0 ? errors.join("; ") : null;
|
||||
}
|
||||
|
||||
// ─── Auto-Worktree Branch Naming ───────────────────────────────────────────
|
||||
|
|
|
|||
|
|
@ -43,7 +43,11 @@ import { debugLog } from "../debug-logger.js";
|
|||
import { PROJECT_FILES } from "../detection.js";
|
||||
import { MergeConflictError } from "../git-service.js";
|
||||
import { recordLearnedOutcome } from "../learning/runtime.js";
|
||||
import { sfRoot } from "../paths.js";
|
||||
import {
|
||||
resolveMilestoneFile,
|
||||
resolveSliceFile,
|
||||
sfRoot,
|
||||
} from "../paths.js";
|
||||
import { resolvePersistModelChanges } from "../preferences.js";
|
||||
import {
|
||||
approveProductionMutationWithLlmPolicy,
|
||||
|
|
@ -667,6 +671,7 @@ export async function runPreDispatch(
|
|||
const result = await startSliceParallel(s.basePath, mid, eligible, {
|
||||
maxWorkers: prefs.slice_parallel.max_workers ?? 2,
|
||||
useExecutionGraph: uokFlags.executionGraph,
|
||||
shellWrapper: prefs.shell_wrapper,
|
||||
});
|
||||
if (result.started.length > 0) {
|
||||
ctx.ui.notify(
|
||||
|
|
@ -1354,6 +1359,67 @@ export async function runDispatch(
|
|||
return { action: "break", reason: "prior-slice-blocker" };
|
||||
}
|
||||
|
||||
// ── UOK Plan-gate ──────────────────────────────────────────────────────────
|
||||
// Structural validation before execute-task units: confirms the plan files
|
||||
// exist. FailureClass "input" → 0 retries (broken plan needs human fix, not
|
||||
// an LLM retry). Only fires when uok.gates.enabled is true.
|
||||
const uokFlagsDispatch = resolveUokFlags(prefs);
|
||||
if (uokFlagsDispatch.gates && unitType === "execute-task") {
|
||||
let planGateOutcome: "pass" | "fail" = "pass";
|
||||
let planGateRationale = "";
|
||||
|
||||
if (!mid) {
|
||||
planGateOutcome = "fail";
|
||||
planGateRationale = "No active milestone for execute-task dispatch";
|
||||
} else {
|
||||
const roadmapPath = resolveMilestoneFile(s.basePath, mid, "ROADMAP");
|
||||
if (!roadmapPath || !existsSync(roadmapPath)) {
|
||||
planGateOutcome = "fail";
|
||||
planGateRationale = `Milestone roadmap not found for ${mid}`;
|
||||
} else if (state.activeSlice?.id) {
|
||||
const slicePlanPath = resolveSliceFile(
|
||||
s.basePath,
|
||||
mid,
|
||||
state.activeSlice.id,
|
||||
"PLAN",
|
||||
);
|
||||
if (!slicePlanPath || !existsSync(slicePlanPath)) {
|
||||
planGateOutcome = "fail";
|
||||
planGateRationale = `Slice plan not found for ${mid}/${state.activeSlice.id}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const planGateRunner = new UokGateRunner();
|
||||
planGateRunner.register({
|
||||
id: "plan-gate",
|
||||
type: "policy",
|
||||
execute: async () => ({
|
||||
outcome: planGateOutcome,
|
||||
failureClass: planGateOutcome === "pass" ? "none" : "input",
|
||||
rationale: planGateRationale || "Plan files verified",
|
||||
}),
|
||||
});
|
||||
const planGateResult = await planGateRunner.run("plan-gate", {
|
||||
basePath: s.basePath,
|
||||
traceId: `dispatch:${ic.flowId}`,
|
||||
turnId: `iter-${ic.iteration}`,
|
||||
milestoneId: mid ?? undefined,
|
||||
sliceId: state.activeSlice?.id ?? undefined,
|
||||
unitType,
|
||||
unitId,
|
||||
});
|
||||
if (planGateResult.outcome !== "pass") {
|
||||
ctx.ui.notify(
|
||||
`Plan gate: ${planGateResult.rationale ?? "plan validation failed"} — pausing.`,
|
||||
"warning",
|
||||
);
|
||||
await deps.pauseAuto(ctx, pi);
|
||||
debugLog("autoLoop", { phase: "exit", reason: "plan-gate-failed" });
|
||||
return { action: "break", reason: "plan-gate-failed" };
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
action: "next",
|
||||
data: {
|
||||
|
|
@ -2647,6 +2713,34 @@ export async function runFinalize(
|
|||
// Hook sidecar items skip verification entirely.
|
||||
// Non-hook sidecar items run verification but skip retries (just continue).
|
||||
const skipVerification = sidecarItem?.kind === "hook";
|
||||
const uokFlagsFinalize = resolveUokFlags(ic.prefs);
|
||||
const runVerifyGate =
|
||||
uokFlagsFinalize.gates &&
|
||||
iterData.unitType === "execute-task" &&
|
||||
!skipVerification;
|
||||
|
||||
async function emitVerificationGate(
|
||||
outcome: "pass" | "fail",
|
||||
failureClass: "none" | "verification" | "manual-attention",
|
||||
rationale: string,
|
||||
): Promise<void> {
|
||||
if (!runVerifyGate) return;
|
||||
const vgRunner = new UokGateRunner();
|
||||
vgRunner.register({
|
||||
id: "unit-verification-gate",
|
||||
type: "verification",
|
||||
execute: async () => ({ outcome, failureClass, rationale }),
|
||||
});
|
||||
await vgRunner.run("unit-verification-gate", {
|
||||
basePath: s.basePath,
|
||||
traceId: `finalize:${ic.flowId}`,
|
||||
turnId: `iter-${ic.iteration}`,
|
||||
milestoneId: iterData.mid ?? undefined,
|
||||
unitType: iterData.unitType,
|
||||
unitId: iterData.unitId,
|
||||
});
|
||||
}
|
||||
|
||||
if (!skipVerification) {
|
||||
const verificationResult = await deps.runPostUnitVerification(
|
||||
{ s, ctx, pi },
|
||||
|
|
@ -2664,6 +2758,11 @@ export async function runFinalize(
|
|||
verificationPassed: false,
|
||||
},
|
||||
);
|
||||
await emitVerificationGate(
|
||||
"fail",
|
||||
"manual-attention",
|
||||
"Post-unit verification paused — requires human attention",
|
||||
);
|
||||
debugLog("autoLoop", { phase: "exit", reason: "verification-pause" });
|
||||
return { action: "break", reason: "verification-pause" };
|
||||
}
|
||||
|
|
@ -2679,6 +2778,11 @@ export async function runFinalize(
|
|||
verificationPassed: false,
|
||||
},
|
||||
);
|
||||
await emitVerificationGate(
|
||||
"fail",
|
||||
"verification",
|
||||
"Post-unit verification failed — retrying unit",
|
||||
);
|
||||
if (sidecarItem) {
|
||||
// Sidecar verification retries are skipped — just continue
|
||||
debugLog("autoLoop", {
|
||||
|
|
@ -2695,6 +2799,9 @@ export async function runFinalize(
|
|||
return { action: "continue" };
|
||||
}
|
||||
}
|
||||
|
||||
// Verification passed — record gate pass for audit/db
|
||||
await emitVerificationGate("pass", "none", "Post-unit verification passed");
|
||||
}
|
||||
|
||||
// Post-verification processing (DB dual-write, hooks, triage, quick-tasks)
|
||||
|
|
|
|||
|
|
@ -84,6 +84,7 @@ import {
|
|||
listUnitRuntimeRecords,
|
||||
} from "./unit-runtime.js";
|
||||
import { resolveUokFlags } from "./uok/flags.js";
|
||||
import { UokGateRunner } from "./uok/gate-runner.js";
|
||||
import { ensurePlanV2Graph as ensurePlanningFlowGraph } from "./uok/plan-v2.js";
|
||||
import { validateDirectory } from "./validate-directory.js";
|
||||
import {
|
||||
|
|
@ -172,23 +173,58 @@ function needsPlanningFlowGate(state: SFState): boolean {
|
|||
);
|
||||
}
|
||||
|
||||
function runPlanningFlowGate(
|
||||
async function runPlanningFlowGate(
|
||||
ctx: ExtensionContext,
|
||||
basePath: string,
|
||||
state: SFState,
|
||||
): boolean {
|
||||
): Promise<boolean> {
|
||||
const prefs = loadEffectiveSFPreferences()?.preferences;
|
||||
const uokFlags = resolveUokFlags(prefs);
|
||||
if (!uokFlags.planningFlow || !needsPlanningFlowGate(state)) return true;
|
||||
const compiled = ensurePlanningFlowGraph(basePath, state);
|
||||
|
||||
const milestoneId = state.activeMilestone?.id ?? undefined;
|
||||
const traceId = `guided-flow:${milestoneId ?? "unknown"}`;
|
||||
const turnId = `guided-${Date.now()}`;
|
||||
|
||||
const persistGate = async (
|
||||
outcome: "pass" | "manual-attention",
|
||||
failureClass: "none" | "manual-attention",
|
||||
rationale: string,
|
||||
findings = "",
|
||||
): Promise<void> => {
|
||||
if (!uokFlags.gates) return;
|
||||
const gateRunner = new UokGateRunner();
|
||||
gateRunner.register({
|
||||
id: "planning-flow-gate",
|
||||
type: "policy",
|
||||
execute: async () => ({ outcome, failureClass, rationale, findings }),
|
||||
});
|
||||
await gateRunner.run("planning-flow-gate", {
|
||||
basePath,
|
||||
traceId,
|
||||
turnId,
|
||||
milestoneId,
|
||||
unitType: "pre-dispatch",
|
||||
unitId: "guided-flow",
|
||||
});
|
||||
};
|
||||
|
||||
if (!compiled.ok) {
|
||||
const reason = compiled.reason ?? "planning-flow compilation failed";
|
||||
await persistGate(
|
||||
"manual-attention",
|
||||
"manual-attention",
|
||||
"planning flow compile gate failed",
|
||||
reason,
|
||||
);
|
||||
ctx.ui.notify(
|
||||
`Plan gate failed-closed: ${reason}. Complete plan/discuss artifacts before execution.`,
|
||||
"error",
|
||||
);
|
||||
return false;
|
||||
}
|
||||
await persistGate("pass", "none", "planning flow compile gate passed");
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
@ -1862,7 +1898,7 @@ export async function showWorkflowEntry(
|
|||
logWarning("guided", `STATE.md rebuild failed: ${(err as Error).message}`);
|
||||
}
|
||||
|
||||
if (!runPlanningFlowGate(ctx, basePath, state)) return;
|
||||
if (!(await runPlanningFlowGate(ctx, basePath, state))) return;
|
||||
|
||||
if (!state.activeMilestone?.id) {
|
||||
// Guard: if a discuss session is already in flight, don't re-inject the prompt.
|
||||
|
|
|
|||
|
|
@ -668,7 +668,22 @@ export function spawnWorker(basePath: string, milestoneId: string): boolean {
|
|||
workerEnv.SF_WORKER_MODEL = state.config.worker_model;
|
||||
}
|
||||
|
||||
child = spawn(process.execPath, [binPath, "headless", "--json", "auto"], {
|
||||
const shellWrapper = state.config.shell_wrapper ?? [];
|
||||
const [spawnCmd, spawnArgs] =
|
||||
shellWrapper.length > 0
|
||||
? [
|
||||
shellWrapper[0],
|
||||
[
|
||||
...shellWrapper.slice(1),
|
||||
process.execPath,
|
||||
binPath,
|
||||
"headless",
|
||||
"--json",
|
||||
"auto",
|
||||
],
|
||||
]
|
||||
: [process.execPath, [binPath, "headless", "--json", "auto"]];
|
||||
child = spawn(spawnCmd, spawnArgs, {
|
||||
cwd: worker.worktreePath,
|
||||
env: workerEnv,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
|
|
|
|||
|
|
@ -148,6 +148,8 @@ export const KNOWN_PREFERENCE_KEYS = new Set<string>([
|
|||
"discuss_web_research",
|
||||
"discuss_depth",
|
||||
"flat_rate_providers",
|
||||
"shell_wrapper",
|
||||
"workspace",
|
||||
]);
|
||||
|
||||
/** Canonical list of all dispatch unit types. */
|
||||
|
|
@ -614,6 +616,39 @@ export interface SFPreferences {
|
|||
* `provider_model_allow`.
|
||||
*/
|
||||
provider_model_block?: Record<string, string[]>;
|
||||
|
||||
/**
|
||||
* Shell wrapper prepended when SF spawns sub-processes (parallel milestone
|
||||
* workers, slice workers). Useful for projects that require a specific shell
|
||||
* environment such as a Nix development shell.
|
||||
*
|
||||
* Each element is a separate argument. The worker is launched as:
|
||||
* shell_wrapper[0] [...shell_wrapper.slice(1)] <node> <sf-loader> headless ...
|
||||
*
|
||||
* Example for NixOS projects:
|
||||
* shell_wrapper:
|
||||
* - nix
|
||||
* - develop
|
||||
* - --command
|
||||
*/
|
||||
shell_wrapper?: string[];
|
||||
|
||||
/**
|
||||
* Workspace lifecycle hooks. Shell scripts run at key points in the
|
||||
* worktree lifecycle (inspired by Symphony's hooks model).
|
||||
*
|
||||
* Scripts run via `sh -c` in the worktree directory. Failures are logged;
|
||||
* `after_create` failure is fatal to worktree creation, `after_run` is
|
||||
* best-effort.
|
||||
*/
|
||||
workspace?: {
|
||||
/** Runs once when a new worktree is first created. Failure aborts creation. */
|
||||
after_create?: string;
|
||||
/** Runs before each task dispatch inside the worktree. Failure aborts the task. */
|
||||
before_run?: string;
|
||||
/** Runs after each task completes (success or failure). Best-effort. */
|
||||
after_run?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface LoadedSFPreferences {
|
||||
|
|
|
|||
|
|
@ -700,6 +700,41 @@ export function validatePreferences(preferences: SFPreferences): {
|
|||
}
|
||||
}
|
||||
|
||||
// ─── Shell Wrapper ───────────────────────────────────────────────────
|
||||
if (preferences.shell_wrapper !== undefined) {
|
||||
if (
|
||||
Array.isArray(preferences.shell_wrapper) &&
|
||||
preferences.shell_wrapper.every((s) => typeof s === "string" && s.length > 0)
|
||||
) {
|
||||
validated.shell_wrapper = preferences.shell_wrapper as string[];
|
||||
} else {
|
||||
errors.push("shell_wrapper must be an array of non-empty strings");
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Workspace Lifecycle Hooks ───────────────────────────────────────
|
||||
if (preferences.workspace !== undefined) {
|
||||
if (
|
||||
typeof preferences.workspace === "object" &&
|
||||
preferences.workspace !== null
|
||||
) {
|
||||
const ws = preferences.workspace as Record<string, unknown>;
|
||||
const validatedWs: NonNullable<typeof validated.workspace> = {};
|
||||
for (const key of ["after_create", "before_run", "after_run"] as const) {
|
||||
if (ws[key] !== undefined) {
|
||||
if (typeof ws[key] === "string") {
|
||||
validatedWs[key] = ws[key] as string;
|
||||
} else {
|
||||
errors.push(`workspace.${key} must be a string`);
|
||||
}
|
||||
}
|
||||
}
|
||||
validated.workspace = validatedWs;
|
||||
} else {
|
||||
errors.push("workspace must be an object");
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Phase Skip Preferences ─────────────────────────────────────────
|
||||
if (preferences.phases !== undefined) {
|
||||
if (typeof preferences.phases === "object" && preferences.phases !== null) {
|
||||
|
|
|
|||
|
|
@ -862,5 +862,6 @@ export function resolveParallelConfig(
|
|||
auto_merge: prefs?.parallel?.auto_merge ?? "confirm",
|
||||
worker_model: prefs?.parallel?.worker_model,
|
||||
worker_timeout_minutes: prefs?.parallel?.worker_timeout_minutes,
|
||||
shell_wrapper: prefs?.shell_wrapper,
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ import { type ChildProcess, spawn } from "node:child_process";
|
|||
import { appendFileSync, existsSync, mkdirSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { runWorktreePostCreateHook } from "./auto-worktree.js";
|
||||
import { getErrorMessage } from "./error-utils.js";
|
||||
import { sfRoot } from "./paths.js";
|
||||
import { writeSessionStatus } from "./session-status-io.js";
|
||||
|
|
@ -52,12 +53,14 @@ export interface SliceOrchestratorState {
|
|||
maxWorkers: number;
|
||||
startedAt: number;
|
||||
basePath: string;
|
||||
shellWrapper?: string[];
|
||||
}
|
||||
|
||||
export interface StartSliceParallelOpts {
|
||||
maxWorkers?: number;
|
||||
budgetCeiling?: number;
|
||||
useExecutionGraph?: boolean;
|
||||
shellWrapper?: string[];
|
||||
}
|
||||
|
||||
// ─── Module State ──────────────────────────────────────────────────────────
|
||||
|
|
@ -120,6 +123,7 @@ export async function startSliceParallel(
|
|||
maxWorkers,
|
||||
startedAt: Date.now(),
|
||||
basePath,
|
||||
shellWrapper: opts.shellWrapper,
|
||||
};
|
||||
|
||||
const started: string[] = [];
|
||||
|
|
@ -145,6 +149,10 @@ export async function startSliceParallel(
|
|||
|
||||
if (!existsSync(wtPath)) {
|
||||
createWorktree(basePath, wtName, { branch: wtBranch });
|
||||
const hookError = runWorktreePostCreateHook(basePath, wtPath);
|
||||
if (hookError) {
|
||||
logWarning("parallel", hookError, { worktree: wtName });
|
||||
}
|
||||
}
|
||||
|
||||
// Create worker info
|
||||
|
|
@ -346,22 +354,27 @@ function spawnSliceWorker(
|
|||
|
||||
let child: ChildProcess;
|
||||
try {
|
||||
child = spawn(
|
||||
process.execPath,
|
||||
[binPath, "--mode", "json", "--print", "/sf autonomous"],
|
||||
{
|
||||
cwd: worker.worktreePath,
|
||||
env: {
|
||||
...process.env,
|
||||
SF_SLICE_LOCK: sliceId,
|
||||
SF_MILESTONE_LOCK: milestoneId,
|
||||
SF_PROJECT_ROOT: basePath,
|
||||
SF_PARALLEL_WORKER: "1",
|
||||
},
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
detached: false,
|
||||
const shellWrapper = sliceState.shellWrapper ?? [];
|
||||
const workerArgs = [binPath, "--mode", "json", "--print", "/sf autonomous"];
|
||||
const [spawnCmd, spawnArgs] =
|
||||
shellWrapper.length > 0
|
||||
? [
|
||||
shellWrapper[0],
|
||||
[...shellWrapper.slice(1), process.execPath, ...workerArgs],
|
||||
]
|
||||
: [process.execPath, workerArgs];
|
||||
child = spawn(spawnCmd, spawnArgs, {
|
||||
cwd: worker.worktreePath,
|
||||
env: {
|
||||
...process.env,
|
||||
SF_SLICE_LOCK: sliceId,
|
||||
SF_MILESTONE_LOCK: milestoneId,
|
||||
SF_PROJECT_ROOT: basePath,
|
||||
SF_PARALLEL_WORKER: "1",
|
||||
},
|
||||
);
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
detached: false,
|
||||
});
|
||||
} catch (e) {
|
||||
logWarning(
|
||||
"parallel",
|
||||
|
|
|
|||
|
|
@ -469,6 +469,12 @@ export interface ParallelConfig {
|
|||
worker_model?: string;
|
||||
/** Minutes before a running worker is killed as hung. Default: 120. */
|
||||
worker_timeout_minutes?: number;
|
||||
/**
|
||||
* Shell wrapper prepended to sub-process spawns. When set, each worker
|
||||
* process is launched as: shell_wrapper[0] [...shell_wrapper.slice(1)] node sf ...
|
||||
* Example: ["nix", "develop", "--command"] for NixOS projects.
|
||||
*/
|
||||
shell_wrapper?: string[];
|
||||
}
|
||||
|
||||
// ─── Reactive Task Execution Types ───────────────────────────────────────
|
||||
|
|
|
|||
|
|
@ -113,7 +113,8 @@ export type UokNodeKind =
|
|||
| "subagent"
|
||||
| "team-worker"
|
||||
| "verification"
|
||||
| "reprocess";
|
||||
| "reprocess"
|
||||
| "refine";
|
||||
|
||||
export interface UokGraphNode {
|
||||
id: string;
|
||||
|
|
@ -124,6 +125,71 @@ export interface UokGraphNode {
|
|||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/** Category tag explaining why a dispatch decision was taken. */
export type DispatchReasonCode =
  | "policy"
  | "state"
  | "recovery"
  | "manual"
  | "dependency"
  | "conflict"
  | "retry";

/**
 * Structured, machine-readable rationale attached to a dispatch decision.
 */
export interface DispatchExplanation {
  /** High-level reason category for the decision. */
  reasonCode: DispatchReasonCode;
  /** One-line human-readable summary of the decision. */
  summary: string;
  /** Free-form supporting data backing the decision. */
  evidence?: Record<string, unknown>;
  /** What prevented dispatch, when anything did. */
  blockedBy?: Array<{
    kind: "gate" | "state" | "dependency" | "conflict" | "policy" | "manual";
    id: string;
    detail?: string;
  }>;
}

/**
 * Self-describing dispatch decision: what to do, which unit it concerns,
 * why, and under what constraints.
 */
export interface UokDispatchEnvelope {
  action: "dispatch" | "stop" | "skip";
  /** Kind of the graph node behind this decision, when one exists. */
  nodeKind?: UokNodeKind;
  unitType?: string;
  unitId?: string;
  /** Prompt text handed to the dispatched unit, if any. */
  prompt?: string;
  /** Rationale for the decision (always present). */
  reason: DispatchExplanation;
  /** Verdict of the gate that influenced this decision, if one ran. */
  gateVerdict?: GateResult;
  /** Scheduling constraints; derived from the graph node when available. */
  constraints?: {
    reads?: string[];
    writes?: string[];
    dependsOn?: string[];
    maxWorkers?: number;
  };
  /** Correlation identifiers for tracing the decision. */
  trace?: {
    traceId?: string;
    turnId?: string;
    // NOTE(review): presumably the id of the event that triggered this
    // decision — confirm against the emitter.
    causedBy?: string;
  };
}

/** Token identifying the holder of write authority for a trace/turn. */
export interface WriterToken {
  tokenId: string;
  traceId: string;
  turnId: string;
  // NOTE(review): looks like an ISO timestamp — confirm with the writer impl.
  acquiredAt: string;
  /** Which subsystem acquired the token. */
  owner: "uok" | "legacy-compat" | "manual";
}

/** Monotonic ordering of writes within a trace/turn. */
export interface WriteSequence {
  traceId: string;
  turnId: string;
  sequence: number;
}

/** Audit record of a single write performed under a WriterToken. */
export interface WriteRecord {
  writerToken: WriterToken;
  sequence: WriteSequence;
  category: "state" | "audit" | "gitops" | "gate" | "artifact" | "other";
  /** Target path of the write, when file-backed. */
  path?: string;
  operation: "append" | "replace" | "insert" | "update" | "delete" | "noop";
  // NOTE(review): presumably an ISO timestamp — confirm with the writer impl.
  ts: string;
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
export interface UokTurnObserver {
|
||||
onTurnStart(contract: TurnContract): void;
|
||||
onPhaseResult(
|
||||
|
|
|
|||
58
src/resources/extensions/sf/uok/dispatch-envelope.ts
Normal file
58
src/resources/extensions/sf/uok/dispatch-envelope.ts
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
import type {
|
||||
DispatchExplanation,
|
||||
DispatchReasonCode,
|
||||
GateResult,
|
||||
UokDispatchEnvelope,
|
||||
UokGraphNode,
|
||||
} from "./contracts.js";
|
||||
|
||||
/**
 * Flat input accepted by buildDispatchEnvelope. Mirrors UokDispatchEnvelope
 * but with the explanation fields (reasonCode/summary/evidence/blockedBy)
 * lifted to the top level; the builder derives `nodeKind` and `constraints`
 * from `node` when it is present.
 */
export interface BuildDispatchEnvelopeInput {
  action: UokDispatchEnvelope["action"];
  /** Graph node being dispatched; supplies kind and read/write/dependency info. */
  node?: Pick<UokGraphNode, "kind" | "reads" | "writes" | "dependsOn">;
  unitType?: string;
  unitId?: string;
  prompt?: string;
  /** Reason category for the decision (required). */
  reasonCode: DispatchReasonCode;
  /** One-line human-readable summary (required). */
  summary: string;
  evidence?: Record<string, unknown>;
  blockedBy?: DispatchExplanation["blockedBy"];
  gateVerdict?: GateResult;
  trace?: UokDispatchEnvelope["trace"];
}
|
||||
|
||||
export function buildDispatchEnvelope(input: BuildDispatchEnvelopeInput): UokDispatchEnvelope {
|
||||
return {
|
||||
action: input.action,
|
||||
nodeKind: input.node?.kind,
|
||||
unitType: input.unitType,
|
||||
unitId: input.unitId,
|
||||
prompt: input.prompt,
|
||||
reason: {
|
||||
reasonCode: input.reasonCode,
|
||||
summary: input.summary,
|
||||
evidence: input.evidence,
|
||||
blockedBy: input.blockedBy,
|
||||
},
|
||||
gateVerdict: input.gateVerdict,
|
||||
constraints: input.node
|
||||
? {
|
||||
reads: input.node.reads,
|
||||
writes: input.node.writes,
|
||||
dependsOn: input.node.dependsOn,
|
||||
}
|
||||
: undefined,
|
||||
trace: input.trace,
|
||||
};
|
||||
}
|
||||
|
||||
export function explainDispatch(envelope: UokDispatchEnvelope): string {
|
||||
const subject =
|
||||
envelope.unitType && envelope.unitId
|
||||
? `${envelope.unitType} ${envelope.unitId}`
|
||||
: envelope.nodeKind ?? envelope.action;
|
||||
const blocked =
|
||||
envelope.reason.blockedBy && envelope.reason.blockedBy.length > 0
|
||||
? ` Blocked by: ${envelope.reason.blockedBy.map((b: { kind: string; id: string }) => `${b.kind}:${b.id}`).join(", ")}.`
|
||||
: "";
|
||||
return `[${envelope.reason.reasonCode}] ${subject}: ${envelope.reason.summary}.${blocked}`;
|
||||
}
|
||||
|
|
@ -35,17 +35,18 @@ export function resolveUokFlags(prefs: SFPreferences | undefined): UokFlags {
|
|||
return {
|
||||
enabled: enabledByPreference && !legacyFallback,
|
||||
legacyFallback,
|
||||
gates: uok?.gates?.enabled === true,
|
||||
modelPolicy: uok?.model_policy?.enabled === true,
|
||||
executionGraph: uok?.execution_graph?.enabled === true,
|
||||
gitops: uok?.gitops?.enabled === true,
|
||||
gates: uok?.gates?.enabled ?? true,
|
||||
modelPolicy: uok?.model_policy?.enabled ?? true,
|
||||
executionGraph: uok?.execution_graph?.enabled ?? true,
|
||||
gitops: uok?.gitops?.enabled ?? true,
|
||||
gitopsTurnAction: uok?.gitops?.turn_action ?? "status-only",
|
||||
gitopsTurnPush: uok?.gitops?.turn_push === true,
|
||||
auditEnvelope:
|
||||
uok?.audit_envelope?.enabled === true ||
|
||||
uok?.audit_unified?.enabled === true,
|
||||
(uok?.audit_envelope?.enabled ?? true) ||
|
||||
(uok?.audit_unified?.enabled ?? true),
|
||||
planningFlow:
|
||||
uok?.planning_flow?.enabled === true || uok?.plan_v2?.enabled === true,
|
||||
(uok?.planning_flow?.enabled ?? true) ||
|
||||
(uok?.plan_v2?.enabled ?? true),
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
87
src/resources/extensions/sf/uok/parity-report.ts
Normal file
87
src/resources/extensions/sf/uok/parity-report.ts
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { sfRoot } from "../paths.js";
|
||||
|
||||
/**
 * One line of the uok-parity.jsonl log. All fields are optional because the
 * log is append-only JSONL; lines that fail to parse are represented as
 * synthetic `{ status: "error", ... }` events by parseParityEvents.
 */
export interface UokParityEvent {
  // NOTE(review): presumably an ISO timestamp — confirm against the writer.
  ts?: string;
  /** Code path that emitted the event; "legacy-fallback" is counted specially. */
  path?: string;
  phase?: string;
  /** Outcome tag; "error" feeds criticalMismatches in the report. */
  status?: string;
  /** Error detail; used when status is "error". */
  error?: string;
  // NOTE(review): looks like a feature-flag snapshot at event time — confirm.
  flags?: Record<string, unknown>;
}

/**
 * Aggregated summary over all parity events, written to
 * runtime/uok-parity-report.json by writeParityReport.
 */
export interface UokParityReport {
  /** ISO timestamp at which the report was generated. */
  generatedAt: string;
  /** Path of the JSONL log the report was built from. */
  sourcePath: string;
  /** Total number of parsed events (including synthetic error events). */
  totalEvents: number;
  /** Event count per `path` value ("unknown" for blank/missing paths). */
  paths: Record<string, number>;
  /** Event count per `status` value ("unknown" for blank/missing statuses). */
  statuses: Record<string, number>;
  /** Error messages collected from events whose status was "error". */
  criticalMismatches: string[];
  /** Number of events whose path was "legacy-fallback". */
  fallbackInvocations: number;
}
|
||||
|
||||
// Location of the append-only parity event log (JSONL) under the SF root.
function parityLogPath(basePath: string): string {
  return join(sfRoot(basePath), "runtime", "uok-parity.jsonl");
}

// Location of the generated summary report (pretty-printed JSON).
function reportPath(basePath: string): string {
  return join(sfRoot(basePath), "runtime", "uok-parity-report.json");
}
|
||||
|
||||
function increment(bucket: Record<string, number>, key: string | undefined): void {
|
||||
const normalized = key && key.trim().length > 0 ? key : "unknown";
|
||||
bucket[normalized] = (bucket[normalized] ?? 0) + 1;
|
||||
}
|
||||
|
||||
export function parseParityEvents(raw: string): UokParityEvent[] {
|
||||
return raw
|
||||
.split("\n")
|
||||
.filter(Boolean)
|
||||
.map((line) => {
|
||||
try {
|
||||
return JSON.parse(line) as UokParityEvent;
|
||||
} catch {
|
||||
return { status: "error", error: "invalid parity json line" };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function buildParityReport(
|
||||
events: readonly UokParityEvent[],
|
||||
sourcePath: string,
|
||||
): UokParityReport {
|
||||
const paths: Record<string, number> = {};
|
||||
const statuses: Record<string, number> = {};
|
||||
const criticalMismatches: string[] = [];
|
||||
let fallbackInvocations = 0;
|
||||
|
||||
for (const event of events) {
|
||||
increment(paths, event.path);
|
||||
increment(statuses, event.status);
|
||||
if (event.path === "legacy-fallback") fallbackInvocations += 1;
|
||||
if (event.status === "error") {
|
||||
criticalMismatches.push(event.error ?? "parity event reported error");
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
generatedAt: new Date().toISOString(),
|
||||
sourcePath,
|
||||
totalEvents: events.length,
|
||||
paths,
|
||||
statuses,
|
||||
criticalMismatches,
|
||||
fallbackInvocations,
|
||||
};
|
||||
}
|
||||
|
||||
export function writeParityReport(basePath: string): UokParityReport {
|
||||
const sourcePath = parityLogPath(basePath);
|
||||
const raw = existsSync(sourcePath) ? readFileSync(sourcePath, "utf-8") : "";
|
||||
const report = buildParityReport(parseParityEvents(raw), sourcePath);
|
||||
mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true });
|
||||
writeFileSync(reportPath(basePath), JSON.stringify(report, null, 2) + "\n", "utf-8");
|
||||
return report;
|
||||
}
|
||||
114
src/resources/extensions/sf/uok/writer.ts
Normal file
114
src/resources/extensions/sf/uok/writer.ts
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { randomUUID } from "node:crypto";
|
||||
|
||||
import { atomicWriteSync } from "../atomic-write.js";
|
||||
import { sfRoot } from "../paths.js";
|
||||
import type { WriteRecord, WriterToken } from "./contracts.js";
|
||||
|
||||
interface SequenceState {
|
||||
lastSequence: number;
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
const activeTokens = new Map<string, WriterToken>();
|
||||
|
||||
function tokenKey(basePath: string, turnId: string): string {
|
||||
return `${basePath}:${turnId}`;
|
||||
}
|
||||
|
||||
function sequencePath(basePath: string): string {
|
||||
return join(sfRoot(basePath), "runtime", "uok-writer-sequence.json");
|
||||
}
|
||||
|
||||
function readSequenceState(basePath: string): SequenceState {
|
||||
const path = sequencePath(basePath);
|
||||
if (!existsSync(path)) {
|
||||
return { lastSequence: 0, updatedAt: new Date(0).toISOString() };
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(readFileSync(path, "utf-8")) as Partial<SequenceState>;
|
||||
return {
|
||||
lastSequence: Number.isInteger(parsed.lastSequence) ? Number(parsed.lastSequence) : 0,
|
||||
updatedAt:
|
||||
typeof parsed.updatedAt === "string" ? parsed.updatedAt : new Date(0).toISOString(),
|
||||
};
|
||||
} catch {
|
||||
return { lastSequence: 0, updatedAt: new Date(0).toISOString() };
|
||||
}
|
||||
}
|
||||
|
||||
function writeSequenceState(basePath: string, state: SequenceState): void {
|
||||
atomicWriteSync(sequencePath(basePath), JSON.stringify(state, null, 2) + "\n", "utf-8");
|
||||
}
|
||||
|
||||
export function acquireWriterToken(args: {
|
||||
basePath: string;
|
||||
traceId: string;
|
||||
turnId: string;
|
||||
owner?: WriterToken["owner"];
|
||||
}): WriterToken {
|
||||
const key = tokenKey(args.basePath, args.turnId);
|
||||
const existing = activeTokens.get(key);
|
||||
if (existing) {
|
||||
throw new Error(`Writer token already active for turn ${args.turnId}`);
|
||||
}
|
||||
|
||||
const token: WriterToken = {
|
||||
tokenId: randomUUID(),
|
||||
traceId: args.traceId,
|
||||
turnId: args.turnId,
|
||||
acquiredAt: new Date().toISOString(),
|
||||
owner: args.owner ?? "uok",
|
||||
};
|
||||
activeTokens.set(key, token);
|
||||
return token;
|
||||
}
|
||||
|
||||
export function releaseWriterToken(basePath: string, token: WriterToken): void {
|
||||
const key = tokenKey(basePath, token.turnId);
|
||||
const current = activeTokens.get(key);
|
||||
if (current?.tokenId === token.tokenId) {
|
||||
activeTokens.delete(key);
|
||||
}
|
||||
}
|
||||
|
||||
export function hasActiveWriterToken(basePath: string, turnId: string): boolean {
|
||||
return activeTokens.has(tokenKey(basePath, turnId));
|
||||
}
|
||||
|
||||
export function nextWriteRecord(args: {
|
||||
basePath: string;
|
||||
token: WriterToken;
|
||||
category: WriteRecord["category"];
|
||||
operation: WriteRecord["operation"];
|
||||
path?: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
}): WriteRecord {
|
||||
if (!hasActiveWriterToken(args.basePath, args.token.turnId)) {
|
||||
throw new Error(`Writer token is not active for turn ${args.token.turnId}`);
|
||||
}
|
||||
|
||||
const state = readSequenceState(args.basePath);
|
||||
const sequence = state.lastSequence + 1;
|
||||
const updatedAt = new Date().toISOString();
|
||||
writeSequenceState(args.basePath, { lastSequence: sequence, updatedAt });
|
||||
|
||||
return {
|
||||
writerToken: args.token,
|
||||
sequence: {
|
||||
traceId: args.token.traceId,
|
||||
turnId: args.token.turnId,
|
||||
sequence,
|
||||
},
|
||||
category: args.category,
|
||||
operation: args.operation,
|
||||
path: args.path,
|
||||
ts: updatedAt,
|
||||
metadata: args.metadata,
|
||||
};
|
||||
}
|
||||
|
||||
export function resetWriterTokensForTests(): void {
|
||||
activeTokens.clear();
|
||||
}
|
||||
|
|
@ -62,6 +62,7 @@ export type LogComponent =
|
|||
| "registry" // Rule registry hook state
|
||||
| "renderer" // Markdown renderer and projections
|
||||
| "safety" // LLM safety harness
|
||||
| "scaffold" // Scaffold versioning, manifest, and drift detection (ADR-021)
|
||||
| "ecosystem"; // Third-party .sf/extensions/ plugins
|
||||
|
||||
export interface LogEntry {
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue