Merge pull request #4163 from deseltrus/fix/auto-mode-premature-stops
fix(auto): prevent premature auto-mode stops on blocked phase + missing reassessment
This commit is contained in:
commit
78e8665c59
11 changed files with 100 additions and 33 deletions
|
|
@@ -307,8 +307,11 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
{
|
||||
name: "reassess-roadmap (post-completion)",
|
||||
match: async ({ state, mid, midTitle, basePath, prefs }) => {
|
||||
if (prefs?.phases?.skip_reassess || !prefs?.phases?.reassess_after_slice)
|
||||
return null;
|
||||
if (prefs?.phases?.skip_reassess) return null;
|
||||
// Default reassess_after_slice to true — reassessment after slice completion
|
||||
// is essential for roadmap integrity. Opt-out via explicit `false`.
|
||||
const reassessEnabled = prefs?.phases?.reassess_after_slice ?? true;
|
||||
if (!reassessEnabled) return null;
|
||||
const needsReassess = await checkNeedsReassessment(basePath, mid, state);
|
||||
if (!needsReassess) return null;
|
||||
return {
|
||||
|
|
@@ -877,11 +880,14 @@ export async function resolveDispatch(
|
|||
}
|
||||
}
|
||||
|
||||
// No rule matched — unhandled phase
|
||||
// No rule matched — unhandled phase.
|
||||
// Use level "warning" so the loop pauses (resumable) instead of hard-stopping.
|
||||
// Hard-stop here was causing premature termination for transient phase gaps
|
||||
// (e.g. after reassessment modifies the roadmap and state needs re-derivation).
|
||||
return {
|
||||
action: "stop",
|
||||
reason: `Unhandled phase "${ctx.state.phase}" — run /gsd doctor to diagnose.`,
|
||||
level: "info",
|
||||
level: "warning",
|
||||
matchedRule: "<no-match>",
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -481,10 +481,13 @@ export async function runPreDispatch(
|
|||
);
|
||||
} else if (state.phase === "blocked") {
|
||||
const blockerMsg = `Blocked: ${state.blockers.join(", ")}`;
|
||||
await deps.stopAuto(ctx, pi, blockerMsg);
|
||||
ctx.ui.notify(`${blockerMsg}. Fix and run /gsd auto.`, "warning");
|
||||
deps.sendDesktopNotification("GSD", blockerMsg, "error", "attention", basename(s.originalBasePath || s.basePath));
|
||||
deps.logCmuxEvent(prefs, blockerMsg, "error");
|
||||
// Pause instead of hard-stop so the session is resumable with `/gsd auto`.
|
||||
// Hard-stop here was causing premature termination when slice dependencies
|
||||
// were temporarily unresolvable (e.g. after reassessment added new slices).
|
||||
await deps.pauseAuto(ctx, pi);
|
||||
ctx.ui.notify(`${blockerMsg}. Fix and run /gsd auto to resume.`, "warning");
|
||||
deps.sendDesktopNotification("GSD", blockerMsg, "warning", "attention", basename(s.originalBasePath || s.basePath));
|
||||
deps.logCmuxEvent(prefs, blockerMsg, "warning");
|
||||
} else {
|
||||
const ids = incomplete.map((m: { id: string }) => m.id).join(", ");
|
||||
const diag = `basePath=${s.basePath}, milestones=[${state.registry.map((m: { id: string; status: string }) => `${m.id}:${m.status}`).join(", ")}], phase=${state.phase}`;
|
||||
|
|
@@ -583,13 +586,23 @@ export async function runPreDispatch(
|
|||
return { action: "break", reason: "milestone-complete" };
|
||||
}
|
||||
|
||||
// Terminal: blocked
|
||||
// Terminal: blocked — pause instead of hard-stop so the session is resumable.
|
||||
if (state.phase === "blocked") {
|
||||
const blockerMsg = `Blocked: ${state.blockers.join(", ")}`;
|
||||
await closeoutAndStop(ctx, pi, s, deps, blockerMsg);
|
||||
ctx.ui.notify(`${blockerMsg}. Fix and run /gsd auto.`, "warning");
|
||||
deps.sendDesktopNotification("GSD", blockerMsg, "error", "attention", basename(s.originalBasePath || s.basePath));
|
||||
deps.logCmuxEvent(prefs, blockerMsg, "error");
|
||||
if (s.currentUnit) {
|
||||
await deps.closeoutUnit(
|
||||
ctx,
|
||||
s.basePath,
|
||||
s.currentUnit.type,
|
||||
s.currentUnit.id,
|
||||
s.currentUnit.startedAt,
|
||||
deps.buildSnapshotOpts(s.currentUnit.type, s.currentUnit.id),
|
||||
);
|
||||
}
|
||||
await deps.pauseAuto(ctx, pi);
|
||||
ctx.ui.notify(`${blockerMsg}. Fix and run /gsd auto to resume.`, "warning");
|
||||
deps.sendDesktopNotification("GSD", blockerMsg, "warning", "attention", basename(s.originalBasePath || s.basePath));
|
||||
deps.logCmuxEvent(prefs, blockerMsg, "warning");
|
||||
debugLog("autoLoop", { phase: "exit", reason: "blocked" });
|
||||
deps.emitJournalEvent({ ts: new Date().toISOString(), flowId: ic.flowId, seq: ic.nextSeq(), eventType: "terminal", data: { reason: "blocked", blockers: state.blockers } });
|
||||
return { action: "break", reason: "blocked" };
|
||||
|
|
|
|||
|
|
@@ -157,7 +157,7 @@ Setting `prefer_skills: []` does **not** disable skill discovery — it just mea
|
|||
|
||||
- `phases`: fine-grained control over which phases run. Usually set by `token_profile`, but can be overridden. Keys:
|
||||
- `skip_research`: boolean — skip milestone-level research. Default: `false`.
|
||||
- `reassess_after_slice`: boolean — run roadmap reassessment after each completed slice. Default: `false`.
|
||||
- `reassess_after_slice`: boolean — run roadmap reassessment after each completed slice. Default: `true`.
|
||||
- `skip_reassess`: boolean — force-disable roadmap reassessment even if `reassess_after_slice` is enabled. Default: `false`.
|
||||
- `skip_slice_research`: boolean — skip per-slice research. Default: `false`.
|
||||
|
||||
|
|
|
|||
|
|
@@ -630,13 +630,39 @@ function resolveSliceDependencies(activeMilestoneSlices: SliceRow[]): { activeSl
|
|||
}
|
||||
}
|
||||
|
||||
// First pass: find a slice with ALL dependencies satisfied (strict)
|
||||
let bestFallback: SliceRow | null = null;
|
||||
let bestFallbackSatisfied = -1;
|
||||
|
||||
for (const s of activeMilestoneSlices) {
|
||||
if (isStatusDone(s.status)) continue;
|
||||
if (isDeferredStatus(s.status)) continue;
|
||||
if (s.depends.every(dep => doneSliceIds.has(dep))) {
|
||||
return { activeSlice: { id: s.id, title: s.title }, activeSliceRow: s };
|
||||
}
|
||||
// Track the slice with the most satisfied dependencies as fallback
|
||||
const satisfied = s.depends.filter(dep => doneSliceIds.has(dep)).length;
|
||||
if (satisfied > bestFallbackSatisfied || (satisfied === bestFallbackSatisfied && !bestFallback)) {
|
||||
bestFallback = s;
|
||||
bestFallbackSatisfied = satisfied;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: if no slice has all deps met but there ARE incomplete non-deferred
|
||||
// slices, pick the one with the most deps satisfied. This prevents hard-blocking
|
||||
// when dependency metadata is stale (e.g. after reassessment added/removed slices)
|
||||
// or when deps reference slices from previous milestones.
|
||||
if (bestFallback) {
|
||||
const unmet = bestFallback.depends.filter(dep => !doneSliceIds.has(dep));
|
||||
logWarning("state",
|
||||
`No slice has all deps satisfied — falling back to ${bestFallback.id} ` +
|
||||
`(${bestFallbackSatisfied}/${bestFallback.depends.length} deps met, ` +
|
||||
`unmet: ${unmet.join(", ")})`,
|
||||
{ mid: activeMilestoneSlices[0]?.milestone_id, sid: bestFallback.id },
|
||||
);
|
||||
return { activeSlice: { id: bestFallback.id, title: bestFallback.title }, activeSliceRow: bestFallback };
|
||||
}
|
||||
|
||||
return { activeSlice: null, activeSliceRow: null };
|
||||
}
|
||||
|
||||
|
|
@@ -1431,12 +1457,32 @@ export async function _deriveStateImpl(basePath: string): Promise<GSDState> {
|
|||
};
|
||||
}
|
||||
} else {
|
||||
let bestFallbackLegacy: { id: string; title: string; depends: string[] } | null = null;
|
||||
let bestFallbackLegacySatisfied = -1;
|
||||
|
||||
for (const s of activeRoadmap.slices) {
|
||||
if (s.done) continue;
|
||||
if (s.depends.every(dep => doneSliceIds.has(dep))) {
|
||||
activeSlice = { id: s.id, title: s.title };
|
||||
break;
|
||||
}
|
||||
// Track best fallback
|
||||
const satisfied = s.depends.filter(dep => doneSliceIds.has(dep)).length;
|
||||
if (satisfied > bestFallbackLegacySatisfied) {
|
||||
bestFallbackLegacy = s;
|
||||
bestFallbackLegacySatisfied = satisfied;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: if no slice has all deps met, pick the one with the most deps satisfied
|
||||
if (!activeSlice && bestFallbackLegacy) {
|
||||
const unmet = bestFallbackLegacy.depends.filter(dep => !doneSliceIds.has(dep));
|
||||
logWarning("state",
|
||||
`No slice has all deps satisfied — falling back to ${bestFallbackLegacy.id} ` +
|
||||
`(${bestFallbackLegacySatisfied}/${bestFallbackLegacy.depends.length} deps met, ` +
|
||||
`unmet: ${unmet.join(", ")})`,
|
||||
);
|
||||
activeSlice = { id: bestFallbackLegacy.id, title: bestFallbackLegacy.title };
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -688,8 +688,8 @@ test("autoLoop exits on terminal blocked state", async (t) => {
|
|||
|
||||
assert.ok(deps.callLog.includes("deriveState"), "should have derived state");
|
||||
assert.ok(
|
||||
deps.callLog.includes("stopAuto"),
|
||||
"should have called stopAuto for blocked state",
|
||||
deps.callLog.includes("pauseAuto"),
|
||||
"should have called pauseAuto for blocked state",
|
||||
);
|
||||
assert.ok(
|
||||
!deps.callLog.includes("resolveDispatch"),
|
||||
|
|
|
|||
|
|
@@ -351,8 +351,9 @@ skills_used: []
|
|||
const dbState = await deriveStateFromDb(base);
|
||||
|
||||
assertStatesEqual(dbState, fileState, 'E-blocked');
|
||||
assert.deepStrictEqual(dbState.phase, 'blocked', 'E-blocked: phase is blocked');
|
||||
assert.ok(dbState.blockers.length > 0, 'E-blocked: has blockers');
|
||||
// With partial-dep fallback, circular deps no longer block — fallback picks first eligible slice
|
||||
assert.deepStrictEqual(dbState.phase, 'planning', 'E-blocked: phase is planning (fallback picks a slice)');
|
||||
assert.ok(dbState.activeSlice !== null, 'E-blocked: activeSlice is set via fallback');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
|
|
|
|||
|
|
@@ -616,9 +616,10 @@ describe('derive-state-db', async () => {
|
|||
invalidateStateCache();
|
||||
const dbState = await deriveStateFromDb(base);
|
||||
|
||||
assert.deepStrictEqual(dbState.phase, 'blocked', 'blocked-db: phase is blocked');
|
||||
// With partial-dep fallback, circular deps no longer block — fallback picks first eligible slice
|
||||
assert.deepStrictEqual(dbState.phase, 'planning', 'blocked-db: phase is planning (fallback picks a slice)');
|
||||
assert.deepStrictEqual(dbState.phase, fileState.phase, 'blocked-db: phase matches filesystem');
|
||||
assert.ok(dbState.blockers.length > 0, 'blocked-db: has blockers');
|
||||
assert.ok(dbState.activeSlice !== null, 'blocked-db: activeSlice is set via fallback');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
|
|
|
|||
|
|
@@ -446,9 +446,9 @@ Continue from step 2.
|
|||
|
||||
const state2 = await deriveState(base2);
|
||||
|
||||
assert.deepStrictEqual(state2.phase, 'blocked', 'blocked-B: phase is blocked');
|
||||
assert.deepStrictEqual(state2.activeSlice, null, 'blocked-B: activeSlice is null');
|
||||
assert.ok(state2.blockers.length > 0, 'blocked-B: blockers array is non-empty');
|
||||
// With partial-dep fallback, S01 is picked despite unmet dep on S99
|
||||
assert.deepStrictEqual(state2.phase, 'planning', 'blocked-B: phase is planning (fallback picks S01)');
|
||||
assert.deepStrictEqual(state2.activeSlice?.id, 'S01', 'blocked-B: activeSlice is S01 via fallback');
|
||||
} finally {
|
||||
cleanup(base2);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -691,7 +691,7 @@ describe("transition boundary failures", () => {
|
|||
);
|
||||
});
|
||||
|
||||
test("blocked state: all slices have unmet deps → blocked phase", async () => {
|
||||
test("blocked state: all slices have unmet deps → fallback picks slice", async () => {
|
||||
base = makeTempDir();
|
||||
const mDir = join(base, ".gsd", "milestones", "M001");
|
||||
mkdirSync(join(mDir, "slices", "S01", "tasks"), { recursive: true });
|
||||
|
|
@@ -736,7 +736,9 @@ describe("transition boundary failures", () => {
|
|||
|
||||
invalidateAllCaches();
|
||||
const state = await deriveStateFromDb(base);
|
||||
assert.equal(state.phase, "blocked", "circular deps should produce blocked phase");
|
||||
// With partial-dep fallback, circular deps no longer block — fallback picks first eligible slice
|
||||
assert.equal(state.phase, "planning", "circular deps: fallback picks a slice instead of blocking");
|
||||
assert.ok(state.activeSlice !== null, "activeSlice set via fallback");
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@@ -811,9 +811,9 @@ describe("state-machine-full-walkthrough", () => {
|
|||
assert.ok(state.blockers.length > 0, "should have blockers");
|
||||
});
|
||||
|
||||
test("no eligible slice (all deps unmet) → blocked at slice level", async () => {
|
||||
test("no eligible slice (all deps unmet) → fallback picks slice with most deps satisfied", async () => {
|
||||
const base = createFixtureBase();
|
||||
// S01 depends on S00 which doesn't exist
|
||||
// S01 depends on S00 which doesn't exist — fallback picks S01 anyway
|
||||
writeRoadmap(base, "M001", [
|
||||
"# M001: Test Milestone",
|
||||
"",
|
||||
|
|
@@ -827,11 +827,9 @@ describe("state-machine-full-walkthrough", () => {
|
|||
invalidateStateCache();
|
||||
const state = await deriveState(base);
|
||||
|
||||
assert.equal(state.phase, "blocked");
|
||||
assert.ok(
|
||||
state.blockers.some(b => b.includes("dependency") || b.includes("eligible")),
|
||||
"blockers should mention dependency or eligibility",
|
||||
);
|
||||
// With partial-dep fallback, S01 is picked despite unmet dep on S00
|
||||
assert.equal(state.phase, "planning");
|
||||
assert.equal(state.activeSlice?.id, "S01");
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@@ -263,6 +263,6 @@ test("dispatch: phase skip guards return null (not stop)", () => {
|
|||
const researchGuard = dispatchSrc.match(/skip_research\).*?return null/s);
|
||||
assert.ok(researchGuard, "skip_research guard should return null (fall-through)");
|
||||
|
||||
const reassessGuard = dispatchSrc.match(/reassess_after_slice\).*?return null/s);
|
||||
const reassessGuard = dispatchSrc.match(/reassess_after_slice.*?return null/s);
|
||||
assert.ok(reassessGuard, "reassess_after_slice guard should return null (fall-through)");
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue