fix(sf): avoid parallel research redispatch loops
parent 71ce87b981
commit 8133ba9003
2 changed files with 137 additions and 10 deletions
@@ -78,6 +78,15 @@ import { extractVerdict, isAcceptableUatVerdict } from "./verdict-parser.js";
 import { logError, logWarning } from "./workflow-logger.js";
 
+const MAX_PARALLEL_RESEARCH_SLICES = 8;
+const PARALLEL_RESEARCH_BLOCKING_PHASES = new Set([
+  "blocked",
+  "cancelled",
+  "failed",
+  "recovery",
+  "runaway-warning-sent",
+  "timeout",
+  "timed-out",
+]);
+
 // ─── Types ────────────────────────────────────────────────────────────────
 
@@ -125,6 +134,42 @@ function missingSliceStop(mid: string, phase: string): DispatchAction {
   };
 }
+
+function hasPriorParallelResearchFailure(basePath: string, mid: string): boolean {
+  const blocker = resolveMilestoneFile(basePath, mid, "PARALLEL-BLOCKER");
+  if (blocker) return true;
+
+  const runtimeFile = join(
+    sfRoot(basePath),
+    "runtime",
+    "units",
+    `research-slice-${mid}-parallel-research.json`,
+  );
+  if (!existsSync(runtimeFile)) return false;
+
+  try {
+    const state = JSON.parse(readFileSync(runtimeFile, "utf-8")) as {
+      phase?: unknown;
+      recoveryAttempts?: unknown;
+      lastRecoveryReason?: unknown;
+    };
+    const phase = typeof state.phase === "string" ? state.phase : "";
+    if (PARALLEL_RESEARCH_BLOCKING_PHASES.has(phase)) return true;
+    if (
+      typeof state.recoveryAttempts === "number" &&
+      state.recoveryAttempts > 0
+    ) {
+      return true;
+    }
+    return typeof state.lastRecoveryReason === "string";
+  } catch (err) {
+    logWarning(
+      "dispatch",
+      `Ignoring unreadable parallel-research runtime state for ${mid}: ${err instanceof Error ? err.message : String(err)}`,
+    );
+    return false;
+  }
+}
 
 export function formatTaskCompleteFailurePrompt(reason: string): string {
   return `sf_task_complete failed: ${reason}. Try the call again, or investigate the write path.`;
 }
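The new helper treats the runtime state file as untrusted input: a missing file, malformed JSON, or wrongly typed fields all degrade to "no prior failure" rather than throwing mid-dispatch. A standalone sketch of that defensive-read pattern, for illustration only (readPhase is not part of the commit):

import { existsSync, readFileSync } from "node:fs";

// Sketch: narrow each field with typeof checks and swallow parse errors,
// mirroring hasPriorParallelResearchFailure above.
function readPhase(file: string): string | null {
  if (!existsSync(file)) return null;
  try {
    const state = JSON.parse(readFileSync(file, "utf-8")) as { phase?: unknown };
    return typeof state.phase === "string" ? state.phase : null;
  } catch {
    return null; // unreadable state is treated as no signal, not as failure
  }
}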
@@ -802,15 +847,10 @@ export const DISPATCH_RULES: DispatchRule[] = [
     if (researchReadySlices.length > MAX_PARALLEL_RESEARCH_SLICES)
       return null;
 
-    // #4414: If a previous parallel-research attempt escalated to a blocker
-    // placeholder, skip this rule and fall through to per-slice research
-    // (or other rules) rather than re-dispatching the same failing unit.
-    const parallelBlocker = resolveMilestoneFile(
-      basePath,
-      mid,
-      "PARALLEL-BLOCKER",
-    );
-    if (parallelBlocker) return null;
+    // #4414: If a previous parallel-research attempt escalated or recovered
+    // from a runaway, fall through to per-slice research instead of
+    // re-dispatching the same synthetic unit.
+    if (hasPriorParallelResearchFailure(basePath, mid)) return null;
 
     return {
       action: "dispatch",
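Returning null from this rule is what lets a later per-slice research rule claim the milestone: the dispatcher takes the first rule that produces an action. A rough sketch of that contract with illustrative types (the real DispatchRule and DispatchAction in auto-dispatch carry more fields than shown):

type DispatchAction = { action: "dispatch"; unitType: string; unitId: string };
type DispatchRule = (ctx: { basePath: string; mid: string }) => DispatchAction | null;

// First non-null rule result wins; a null result falls through to the next rule.
function firstAction(rules: DispatchRule[], ctx: { basePath: string; mid: string }): DispatchAction | null {
  for (const rule of rules) {
    const action = rule(ctx);
    if (action !== null) return action;
  }
  return null;
}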
@@ -14,7 +14,7 @@ import {
 } from "node:fs";
 import { tmpdir } from "node:os";
 import { dirname, join } from "node:path";
-import { test, afterEach } from 'vitest';
+import { afterEach, test } from "vitest";
 import { fileURLToPath } from "node:url";
 
 import { resolveDispatch } from "../auto-dispatch.ts";
@@ -216,3 +216,90 @@ test("resolveDispatch prefers parallel research when multiple slices are ready",
     assert.equal(action.unitId, "M001/parallel-research");
   }
 });
+
+test("resolveDispatch skips parallel research when blocker artifact exists", async () => {
+  const base = makeTmpProject();
+  writeFileSync(
+    join(
+      base,
+      ".sf",
+      "milestones",
+      "M001",
+      "M001-PARALLEL-BLOCKER.md",
+    ),
+    "# Parallel Research Blocker\n\nPrevious parallel research stalled.\n",
+    "utf-8",
+  );
+
+  const action = await resolveDispatch({
+    basePath: base,
+    mid: "M001",
+    midTitle: "Parallel Research Milestone",
+    state: {
+      phase: "planning",
+      activeMilestone: {
+        id: "M001",
+        title: "Parallel Research Milestone",
+        status: "active",
+      },
+      activeSlice: { id: "S01", title: "Alpha" },
+      activeTask: null,
+      registry: [],
+      blockers: [],
+    } as any,
+    prefs: undefined,
+  });
+
+  assert.equal(action.action, "dispatch");
+  if (action.action === "dispatch") {
+    assert.equal(action.unitType, "research-slice");
+    assert.equal(action.unitId, "M001/S01");
+  }
+});
+
+test("resolveDispatch skips parallel research after runtime runaway recovery", async () => {
+  const base = makeTmpProject();
+  const unitsDir = join(base, ".sf", "runtime", "units");
+  mkdirSync(unitsDir, { recursive: true });
+  writeFileSync(
+    join(unitsDir, "research-slice-M001-parallel-research.json"),
+    JSON.stringify(
+      {
+        version: 1,
+        unitType: "research-slice",
+        unitId: "M001/parallel-research",
+        phase: "runaway-warning-sent",
+        recoveryAttempts: 1,
+        lastRecoveryReason: "idle",
+      },
+      null,
+      2,
+    ),
+    "utf-8",
+  );
+
+  const action = await resolveDispatch({
+    basePath: base,
+    mid: "M001",
+    midTitle: "Parallel Research Milestone",
+    state: {
+      phase: "planning",
+      activeMilestone: {
+        id: "M001",
+        title: "Parallel Research Milestone",
+        status: "active",
+      },
+      activeSlice: { id: "S01", title: "Alpha" },
+      activeTask: null,
+      registry: [],
+      blockers: [],
+    } as any,
+    prefs: undefined,
+  });
+
+  assert.equal(action.action, "dispatch");
+  if (action.action === "dispatch") {
+    assert.equal(action.unitType, "research-slice");
+    assert.equal(action.unitId, "M001/S01");
+  }
+});
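Both new tests rely on the makeTmpProject helper defined earlier in this test file, outside the shown hunk. A hypothetical reconstruction of its likely shape, assuming it only scaffolds the .sf layout under a temp directory (the real helper may also register cleanup):

import { mkdirSync, mkdtempSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

// Hypothetical: the actual helper is not shown in this diff.
function makeTmpProject(): string {
  const base = mkdtempSync(join(tmpdir(), "sf-dispatch-test-"));
  mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true });
  return base;
}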