singularity-forge/src/resources/extensions/sf/doctor.js

1992 lines
62 KiB
JavaScript

import {
copyFileSync,
existsSync,
lstatSync,
mkdirSync,
readdirSync,
readFileSync,
rmSync,
writeFileSync,
} from "node:fs";
import { dirname, extname, join } from "node:path";
import { parse as parseYaml } from "yaml";
import { invalidateAllCaches } from "./cache.js";
import {
checkConfigHealth,
checkEngineHealth,
checkGitHealth,
checkGlobalHealth,
checkRuntimeHealth,
checkSmHealth,
checkTurnStatusHealth,
checkVaultHealth,
} from "./doctor-checks.js";
import { checkEnvironmentHealth } from "./doctor-environment.js";
import { runProviderChecks } from "./doctor-providers.js";
import { GLOBAL_STATE_CODES } from "./doctor-types.js";
import {
countMustHavesMentionedInSummary,
loadFile,
parseSummary,
parseTaskPlanMustHaves,
saveFile,
} from "./files.js";
import { nativeScanSfTree } from "./native-parser-bridge.js";
import { parsePlan, parseRoadmap } from "./parsers.js";
import {
milestonesDir,
relMilestoneFile,
relMilestonePath,
relSfRootFile,
relSliceFile,
relSlicePath,
relTaskFile,
resolveMilestoneFile,
resolveMilestonePath,
resolveSfRootFile,
resolveSliceFile,
resolveSlicePath,
resolveTaskFile,
resolveTasksDir,
sfRoot,
} from "./paths.js";
import { loadEffectiveSFPreferences } from "./preferences.js";
import { bodyHash, extractMarker } from "./scaffold-versioning.js";
import { readAllSelfFeedback, recordSelfFeedback } from "./self-feedback.js";
import { getMilestoneSlices, getSliceTasks, isDbAvailable } from "./sf-db.js";
import { deriveState, isMilestoneComplete } from "./state.js";
import { isClosedStatus } from "./status-guards.js";
import { parseUnitId } from "./unit-id.js";
// ─── Flow Audit Implementation ────────────────────────────────────────────
// A dispatched unit with no recorded progress for longer than this is stale.
const DEFAULT_STALE_PROGRESS_MS = 20 * 60 * 1000;
// Runtime budget for optional (non-blocking) child processes before the audit
// reports them as over budget (and, when opted in, kills them).
const DEFAULT_OPTIONAL_CHILD_BUDGET_MS = 30 * 60 * 1000;
// Notifications older than this window are ignored when collecting recent errors.
const DEFAULT_RECENT_ERROR_MAX_AGE_MS = 30 * 60 * 1000;
// Number of unresolved same-milestone failures that triggers one rollup entry.
const REPEATED_FAILURE_THRESHOLD = 3;
// Self-feedback kind used to deduplicate repeated-milestone-failure rollups.
const FLOW_AUDIT_ROLLUP_KIND = "flow-audit:repeated-milestone-failure";
// NOTE(review): not referenced in this chunk — presumably versions doctor
// history records persisted elsewhere in this file; confirm before removing.
const DOCTOR_HISTORY_SCHEMA_VERSION = 1;
// Root-level harness files that older scaffolds generated; doctor removes the
// copies SF still owns (pending scaffold marker, unedited body) from the root.
const LEGACY_ROOT_HARNESS_PATHS = [
  "harness/AGENTS.md",
  "harness/specs/AGENTS.md",
  "harness/specs/bootstrap.md",
  "harness/evals/AGENTS.md",
  "harness/graders/AGENTS.md",
];
// Directory names skipped when walking the .sf tree for form linting.
const SF_FORM_LINT_SKIP_DIRS = new Set([
  "node_modules",
  "worktrees",
  "sift",
  ".sift",
]);
function pruneEmptyDir(path) {
try {
if (existsSync(path) && readdirSync(path).length === 0) {
rmSync(path, { recursive: false });
}
} catch {
// Best-effort cleanup only.
}
}
/**
 * Find legacy root-harness files that SF itself generated and still owns.
 *
 * A file is "owned" only when its scaffold marker is present, names this
 * exact template path, is still in the pending state, and the body hash
 * matches the marker — i.e. the user never edited it.
 */
function collectOwnedLegacyRootHarnessFiles(basePath) {
  const ownedPaths = [];
  for (const relPath of LEGACY_ROOT_HARNESS_PATHS) {
    const candidate = join(basePath, relPath);
    if (!existsSync(candidate)) continue;
    const { marker, body } = extractMarker(candidate);
    const isOwned =
      marker &&
      marker.template === relPath &&
      marker.state === "pending" &&
      bodyHash(body) === marker.hash;
    if (isOwned) ownedPaths.push(relPath);
  }
  return ownedPaths;
}
/**
 * Delete the given SF-owned legacy harness files and prune any directories
 * under `<basePath>/harness` that become empty as a result.
 */
function removeOwnedLegacyRootHarnessFiles(basePath, relPaths) {
  const harnessRoot = join(basePath, "harness");
  // Remove every file first so sibling files do not block directory pruning.
  for (const relPath of relPaths) {
    rmSync(join(basePath, relPath), { force: true });
  }
  // Then walk from each file's directory up toward harness/, pruning empties.
  for (const relPath of relPaths) {
    let current = dirname(join(basePath, relPath));
    while (current.startsWith(harnessRoot)) {
      pruneEmptyDir(current);
      if (current === harnessRoot) break;
      current = dirname(current);
    }
  }
  pruneEmptyDir(harnessRoot);
}
/**
 * Flag (and, when shouldFix allows it, remove) SF-generated harness files
 * that still live under the project-root harness/ directory instead of
 * .sf/harness/.
 */
function checkGeneratedArtifactResidue(
  basePath,
  issues,
  fixesApplied,
  shouldFix,
) {
  const owned = collectOwnedLegacyRootHarnessFiles(basePath);
  if (owned.length === 0) return;
  issues.push({
    severity: "warning",
    code: "generated_root_harness_residue",
    scope: "project",
    unitId: "project",
    message: `Found ${owned.length} SF-owned generated harness file(s) under root harness/. Generated operational harness belongs under .sf/harness/; promote durable contracts to docs/specs/ explicitly.`,
    file: "harness/",
    fixable: true,
  });
  if (!shouldFix("generated_root_harness_residue")) return;
  removeOwnedLegacyRootHarnessFiles(basePath, owned);
  fixesApplied.push(`removed ${owned.length} SF-owned root harness file(s)`);
}
/**
 * Recursively list entries under `root` as `{ path, name, isDir }` records
 * with walk-root-relative paths. Directories named in SF_FORM_LINT_SKIP_DIRS
 * are listed but not descended into; unreadable entries are skipped.
 */
function walkSfTreeFallback(root, prefix = "") {
  if (!existsSync(root)) return [];
  const collected = [];
  for (const name of readdirSync(root)) {
    const absolute = join(root, name);
    const relative = prefix ? `${prefix}/${name}` : name;
    let info;
    try {
      info = lstatSync(absolute);
    } catch {
      continue; // entry vanished or is unreadable — ignore it
    }
    const isDir = info.isDirectory();
    collected.push({ path: relative, name, isDir });
    if (isDir && !SF_FORM_LINT_SKIP_DIRS.has(name)) {
      for (const child of walkSfTreeFallback(absolute, relative)) {
        collected.push(child);
      }
    }
  }
  return collected;
}
/**
 * List .sf-relative file paths eligible for form (syntax) linting.
 * Prefers the native scanner, falling back to the JS directory walk, then
 * drops anything whose path crosses a skip directory (node_modules, …).
 */
function collectSfFormFiles(basePath) {
  const root = sfRoot(basePath);
  if (!existsSync(root)) return [];
  const entries = nativeScanSfTree(root) ?? walkSfTreeFallback(root);
  const files = [];
  for (const entry of entries) {
    if (entry.isDir) continue;
    const segments = entry.path.split("/");
    if (segments.some((segment) => SF_FORM_LINT_SKIP_DIRS.has(segment))) {
      continue;
    }
    files.push(entry.path);
  }
  return files;
}
/**
 * Validate JSONL content line by line.
 *
 * @returns a human-readable "line N: message" for the first invalid line, or
 *   null when every non-blank line parses as JSON (blank lines are allowed).
 */
function parseJsonl(content) {
  const rows = content.split(/\r?\n/);
  let lineNo = 0;
  for (const raw of rows) {
    lineNo += 1;
    const row = raw.trim();
    if (row === "") continue;
    try {
      JSON.parse(row);
    } catch (error) {
      const detail = error instanceof Error ? error.message : String(error);
      return `line ${lineNo}: ${detail}`;
    }
  }
  return null;
}
/**
 * Check the YAML frontmatter of a markdown document.
 *
 * @returns null when there is no frontmatter or it parses cleanly; otherwise
 *   a description of the problem (unclosed block, or the YAML parse error).
 */
function parseMarkdownFrontmatter(content) {
  const hasOpener =
    content.startsWith("---\n") || content.startsWith("---\r\n");
  if (!hasOpener) return null;
  const unified = content.replace(/\r\n/g, "\n");
  const closeIndex = unified.indexOf("\n---\n", 4);
  if (closeIndex === -1) {
    return "frontmatter opening marker has no closing marker";
  }
  try {
    parseYaml(unified.slice(4, closeIndex));
    return null;
  } catch (error) {
    return error instanceof Error ? error.message : String(error);
  }
}
/**
 * Quote YAML sequence items so multi-line scalars survive re-parsing.
 *
 * Each `- item` line absorbs its continuation lines (until the next top-level
 * key, a sibling item at the same indent, or a blank line) and is rewritten
 * as a JSON-quoted scalar with a literal backslash-n between the pieces.
 */
function normalizeFrontmatterArrayScalars(frontmatter) {
  const ITEM_RE = /^(\s*)-\s+(.+)$/;
  const KEY_RE = /^\S[^:]*:\s*/;
  const source = frontmatter.split("\n");
  const result = [];
  let index = 0;
  while (index < source.length) {
    const current = source[index];
    const item = current.match(ITEM_RE);
    if (!item) {
      result.push(current);
      index += 1;
      continue;
    }
    const [, indent, firstPiece] = item;
    const pieces = [firstPiece];
    index += 1;
    while (index < source.length) {
      const lookahead = source[index];
      if (KEY_RE.test(lookahead)) break;
      if (lookahead.match(ITEM_RE)?.[1] === indent) break;
      if (lookahead.trim() === "") break;
      pieces.push(lookahead.trim());
      index += 1;
    }
    result.push(`${indent}- ${JSON.stringify(pieces.join("\\n"))}`);
  }
  return result.join("\n");
}
/**
 * Attempt an automatic repair of broken markdown frontmatter by re-quoting
 * array scalars. Returns the repaired full document, or null when there is
 * no frontmatter, nothing changed, or the repaired YAML still fails to parse.
 */
function repairMarkdownFrontmatter(content) {
  const opensWithMarker =
    content.startsWith("---\n") || content.startsWith("---\r\n");
  if (!opensWithMarker) return null;
  const unified = content.replace(/\r\n/g, "\n");
  const closeIndex = unified.indexOf("\n---\n", 4);
  if (closeIndex === -1) return null;
  const original = unified.slice(4, closeIndex);
  const candidate = normalizeFrontmatterArrayScalars(original);
  if (candidate === original) return null; // nothing to repair
  try {
    parseYaml(candidate);
  } catch {
    return null; // the repair did not produce valid YAML either
  }
  return `---\n${candidate}\n---\n${unified.slice(closeIndex + 5)}`;
}
/**
 * Repair a "JSONL" file whose entire content is one pretty-printed JSON
 * object/array by collapsing it onto a single line. Returns the repaired
 * content (with trailing newline) or null when no safe repair exists.
 */
function repairJsonl(content) {
  if (content.trim() === "") return null;
  let value;
  try {
    value = JSON.parse(content);
  } catch {
    return null; // not a single JSON document — cannot repair safely
  }
  if (value === null || typeof value !== "object") return null;
  return `${JSON.stringify(value)}\n`;
}
/** Dispatch to the per-format repair routine; null when no repair exists. */
function repairSfFormContent(ext, content) {
  switch (ext) {
    case ".jsonl":
      return repairJsonl(content);
    case ".md":
      return repairMarkdownFrontmatter(content);
    default:
      return null;
  }
}
/**
 * Lint every file under .sf/ for basic form (syntax) validity: JSON, JSONL,
 * YAML, and markdown frontmatter. Invalid files are reported as errors; when
 * an automatic repair exists and the caller opted in via shouldFix, the
 * repaired content is written back in place.
 *
 * Fix: removed the dead store `content = repaired` after the write-back —
 * `content` was never read again in that iteration.
 */
function checkSfFormSyntax(basePath, issues, fixesApplied, shouldFix) {
  const root = sfRoot(basePath);
  for (const relPath of collectSfFormFiles(basePath)) {
    const filePath = join(root, relPath);
    let content;
    try {
      content = readFileSync(filePath, "utf-8");
    } catch {
      continue; // unreadable files are skipped, not reported
    }
    const ext = extname(relPath).toLowerCase();
    let parseError = null;
    try {
      if (ext === ".json") {
        JSON.parse(content);
      } else if (ext === ".jsonl") {
        parseError = parseJsonl(content);
      } else if (ext === ".yaml" || ext === ".yml") {
        parseYaml(content);
      } else if (ext === ".md") {
        parseError = parseMarkdownFrontmatter(content);
      }
    } catch (error) {
      parseError = error instanceof Error ? error.message : String(error);
    }
    if (!parseError) continue;
    const repaired = repairSfFormContent(ext, content);
    const repairable = repaired !== null;
    issues.push({
      severity: "error",
      code: "invalid_sf_form",
      scope: "project",
      unitId: "project",
      message: `.sf/${relPath} has invalid ${ext.slice(1) || "form"} syntax: ${parseError}`,
      file: `.sf/${relPath}`,
      fixable: repairable,
    });
    if (repairable && shouldFix("invalid_sf_form")) {
      writeFileSync(filePath, repaired, "utf-8");
      fixesApplied.push(`repaired .sf form syntax in .sf/${relPath}`);
    }
  }
}
/**
 * Coerce a timestamp value to epoch milliseconds.
 * Finite numbers below 1e10 are treated as epoch seconds and scaled to ms;
 * parseable date strings go through Date; anything else yields fallbackMs.
 */
function parseEpochMs(value, fallbackMs) {
  if (typeof value === "number" && Number.isFinite(value)) {
    // Heuristic: second-resolution epochs are < 10^10; ms epochs are larger.
    if (value < 10_000_000_000) return value * 1000;
    return value;
  }
  if (typeof value === "string" && value.trim()) {
    const ms = new Date(value).getTime();
    if (Number.isFinite(ms)) return ms;
  }
  return fallbackMs;
}
/** Format epoch ms as ISO-8601; undefined for missing or non-finite input. */
function formatIso(ms) {
  const usable = ms !== undefined && Number.isFinite(ms);
  return usable ? new Date(ms).toISOString() : undefined;
}
/** Convert milliseconds to whole minutes, rounded, clamped at zero. */
function minutes(ms) {
  const wholeMinutes = Math.round(ms / 60_000);
  return Math.max(0, wholeMinutes);
}
/** Read and parse a JSON file; null when missing, unreadable, or invalid. */
function readJsonFile(path) {
  try {
    if (!existsSync(path)) return null;
    const raw = readFileSync(path, "utf8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
/**
 * Load every parseable *.json runtime unit record from the runtime units
 * directory. Missing directory, unreadable entries, and invalid JSON are
 * all silently ignored — the runtime audit must stay best-effort.
 */
function readRuntimeUnits(runtimeUnitsDir) {
  if (!existsSync(runtimeUnitsDir)) return [];
  const units = [];
  try {
    const jsonFiles = readdirSync(runtimeUnitsDir).filter((name) =>
      name.endsWith(".json"),
    );
    for (const name of jsonFiles) {
      const unit = readJsonFile(join(runtimeUnitsDir, name));
      if (unit) units.push(unit);
    }
  } catch {
    // Runtime audit must stay best-effort.
  }
  return units;
}
/**
 * Parse `ps -eo pid,ppid,etimes,cmd` output into process rows. The etimes
 * column is optional; rows that do not match the expected shape are dropped.
 */
function parsePsOutput(psOutput) {
  const ROW_RE = /^(\d+)\s+(\d+)(?:\s+(\d+))?\s+(.+)$/;
  const processes = [];
  for (const rawLine of psOutput.split("\n")) {
    const line = rawLine.trim();
    if (line === "") continue;
    const fields = line.match(ROW_RE);
    if (!fields) continue;
    const pid = Number.parseInt(fields[1], 10);
    const ppid = Number.parseInt(fields[2], 10);
    if (!Number.isFinite(pid) || !Number.isFinite(ppid)) continue;
    let ageMs;
    if (fields[3] !== undefined) {
      const elapsedSeconds = Number.parseInt(fields[3], 10);
      if (Number.isFinite(elapsedSeconds)) ageMs = elapsedSeconds * 1000;
    }
    processes.push({ pid, ppid, ageMs, cmd: fields[4] });
  }
  return processes;
}
/**
 * Obtain process rows: an injected psOutput (tests) wins, Windows yields
 * nothing, otherwise the live `ps` listing is parsed. Any `ps` failure
 * (missing binary, timeout) degrades to an empty list.
 */
async function readPsRows(options) {
  if (options.psOutput !== undefined) return parsePsOutput(options.psOutput);
  if (process.platform === "win32") return [];
  try {
    const { execSync } = await import("node:child_process");
    const raw = execSync("ps -eo pid,ppid,etimes,cmd --no-headers", {
      encoding: "utf8",
      timeout: 5000,
    });
    return parsePsOutput(raw);
  } catch {
    return []; // audit continues without process information
  }
}
/**
 * Classify a process row for the flow audit.
 *
 * Categories: "warmup" (sift/warmup helpers), "orphan" (a re-parented
 * next-server, or a bare `sf` whose parent is a dev server), "background"
 * (dev servers), "active-session" (node/sf-run/codex agent sessions), or
 * "unknown". The order of the checks is significant.
 */
function classifyProcess(row, rows = []) {
  const cmd = row.cmd.toLowerCase();
  if (cmd.includes("sift") || cmd.includes("warmup")) return "warmup";
  if (row.ppid === 1 && cmd.includes("next-server")) return "orphan";
  const parentRow = rows.find((candidate) => candidate.pid === row.ppid);
  const parentCmd = parentRow?.cmd?.toLowerCase?.() ?? "";
  const devServerParent =
    parentCmd.includes("next-server") ||
    parentCmd.includes("vite") ||
    parentCmd.includes("turbopack");
  if (cmd.trim() === "sf" && devServerParent) return "orphan";
  const looksLikeDevServer =
    cmd.includes("next-server") ||
    cmd.includes("vite") ||
    cmd.includes("turbopack");
  if (looksLikeDevServer) return "background";
  const runtimeBinary =
    cmd.includes("node") || cmd.includes("sf-run") || cmd.includes("codex");
  const sfSessionHint =
    cmd.includes(" sf") ||
    cmd.includes("/sf") ||
    cmd.includes("dist/loader") ||
    cmd.includes("tool-session") ||
    cmd.includes("headless");
  if (runtimeBinary && sfSessionHint) return "active-session";
  return "unknown";
}
/** True for classifications the flow audit treats as non-blocking children. */
function isOptionalChild(classification) {
  return ["warmup", "background", "orphan"].includes(classification);
}
/**
 * Decide whether a row belongs in the audit report: any classified process
 * is included; "unknown" ones only when they are the active unit's process
 * or one of its direct children.
 */
function shouldIncludeProcess(row, classification, activePid) {
  if (classification !== "unknown") return true;
  if (activePid === undefined) return false;
  const relatedToActive = row.pid === activePid || row.ppid === activePid;
  return relatedToActive;
}
/**
 * Extract an epoch-ms timestamp from a notification entry, accepting the
 * ts / timestamp / time fields (first present wins). Null when the value is
 * missing, not a string, or unparseable.
 */
function parseNotificationEpochMs(entry) {
  const raw = entry.ts ?? entry.timestamp ?? entry.time;
  if (typeof raw !== "string") return null;
  const epochMs = Date.parse(raw);
  return Number.isFinite(epochMs) ? epochMs : null;
}
/**
 * Collect recent error-like messages from .sf/notifications.jsonl.
 *
 * Scans the last 20 rows, skipping flow-audit's own nag messages, entries
 * outside the freshness window, and malformed rows. A row counts as an
 * error when its severity is "error" or its message mentions error/failed.
 */
function readRecentErrors(runtimeRoot, options = {}) {
  const notificationsPath = join(runtimeRoot, "notifications.jsonl");
  if (!existsSync(notificationsPath)) return [];
  const nowMs = options.nowMs ?? Date.now();
  const maxAgeMs = options.maxAgeMs ?? DEFAULT_RECENT_ERROR_MAX_AGE_MS;
  const collected = [];
  try {
    const rows = readFileSync(notificationsPath, "utf8")
      .split("\n")
      .filter((row) => row.trim());
    for (const row of rows.slice(-20)) {
      try {
        const entry = JSON.parse(row);
        const message = entry.message ?? entry.text ?? "";
        const isAuditNag =
          typeof message === "string" &&
          message.startsWith("Flow audit: Review recent errors");
        if (isAuditNag) continue;
        const entryMs = parseNotificationEpochMs(entry);
        if (entryMs !== null) {
          const age = nowMs - entryMs;
          if (age < 0 || age > maxAgeMs) continue; // future or too old
        }
        // Severity is checked first so non-string messages with an explicit
        // "error" severity are still collected.
        const looksLikeError =
          entry.severity === "error" ||
          message.toLowerCase().includes("error") ||
          message.toLowerCase().includes("failed");
        if (looksLikeError) collected.push(message || "Unknown error");
      } catch {
        // skip malformed notification rows
      }
    }
  } catch {
    // non-fatal: an unreadable notifications file simply yields no errors
  }
  return collected;
}
/**
 * Gather loop evidence for an execute-task unit: which prior tasks in the
 * slice PLAN are already done, and which SUMMARY artifacts are missing
 * (the current task's, and the slice's when every task is done). Returns
 * undefined for non-task units, unresolvable IDs, or unreadable plans.
 */
function buildLoopEvidence(basePath, unitType, unitId) {
  if (unitType !== "execute-task") return undefined;
  const { milestone, slice, task } = parseUnitId(unitId);
  if (!milestone || !slice || !task) return undefined;
  const planPath = resolveSliceFile(basePath, milestone, slice, "PLAN");
  if (!planPath || !existsSync(planPath)) return undefined;
  const completedPriorTasks = [];
  const missingSummaries = [];
  try {
    const plan = parsePlan(readFileSync(planPath, "utf8"));
    const position = plan.tasks.findIndex((entry) => entry.id === task);
    if (position > 0) {
      for (const earlier of plan.tasks.slice(0, position)) {
        if (earlier.done) completedPriorTasks.push(earlier.id);
      }
    }
    if (!resolveTaskFile(basePath, milestone, slice, task, "SUMMARY")) {
      missingSummaries.push(`${milestone}/${slice}/${task} task SUMMARY`);
    }
    const everyTaskDone =
      plan.tasks.length > 0 && plan.tasks.every((entry) => entry.done);
    if (
      everyTaskDone &&
      !resolveSliceFile(basePath, milestone, slice, "SUMMARY")
    ) {
      missingSummaries.push(`${milestone}/${slice} slice SUMMARY`);
    }
  } catch {
    return undefined; // unreadable/unparseable plan — no evidence
  }
  return {
    milestoneId: milestone,
    sliceId: slice,
    taskId: task,
    completedPriorTasks,
    missingSummaries,
  };
}
/**
 * Merge runaway-guard pause reasons from runtime unit records with
 * unresolved runaway-related self-feedback, optionally filtered to one
 * milestone. Deduplicated; at most the last 10 entries are kept.
 */
function collectRunawayHistory(runtimeUnits, feedback, milestoneId) {
  const notes = [];
  for (const unit of runtimeUnits) {
    const pause = unit.runawayGuardPause;
    if (!pause) continue;
    const unitLabel = pause.unitId ?? unit.unitId ?? "unknown";
    if (milestoneId && !unitLabel.startsWith(`${milestoneId}/`)) continue;
    notes.push(pause.reason ?? `Runaway guard paused ${unitLabel}`);
  }
  for (const item of feedback) {
    if (item.resolvedAt) continue;
    if (milestoneId && item.occurredIn?.milestone !== milestoneId) continue;
    const mentionsRunaway =
      item.kind.includes("runaway") ||
      item.summary.toLowerCase().includes("runaway");
    if (mentionsRunaway) notes.push(`${item.kind}: ${item.summary}`);
  }
  return [...new Set(notes)].slice(-10);
}
/**
 * When a milestone accumulates REPEATED_FAILURE_THRESHOLD or more unresolved
 * flow failures, file (or reuse) a single deduplicated rollup self-feedback
 * entry instead of one item per failed unit.
 *
 * @returns undefined when not applicable or recording is disabled/failed,
 *   otherwise { filed, milestoneId, count, entryId } describing the new or
 *   already-open rollup entry.
 */
function maybeRecordRepeatedFailureRollup(
  basePath,
  milestoneId,
  feedback,
  options,
) {
  if (!milestoneId || options.recordSelfFeedback === false) return undefined;
  const isOpenForMilestone = (entry) =>
    !entry.resolvedAt && entry.occurredIn?.milestone === milestoneId;
  const failures = feedback.filter(
    (entry) =>
      isOpenForMilestone(entry) && entry.kind !== FLOW_AUDIT_ROLLUP_KIND,
  );
  if (failures.length < REPEATED_FAILURE_THRESHOLD) return undefined;
  const openRollup = feedback.find(
    (entry) =>
      isOpenForMilestone(entry) && entry.kind === FLOW_AUDIT_ROLLUP_KIND,
  );
  if (openRollup) {
    // An open rollup already covers this milestone — report it, file nothing.
    return {
      filed: false,
      milestoneId,
      count: failures.length,
      entryId: openRollup.id,
    };
  }
  // Keep evidence bounded: only the 8 most recent failures are quoted.
  const evidence = failures
    .slice(-8)
    .map((entry) => {
      const location = [
        entry.occurredIn?.milestone,
        entry.occurredIn?.slice,
        entry.occurredIn?.task,
      ]
        .filter(Boolean)
        .join("/");
      return `[${entry.id}] ${entry.kind} ${location}: ${entry.summary}`;
    })
    .join("\n");
  const recorded = recordSelfFeedback(
    {
      kind: FLOW_AUDIT_ROLLUP_KIND,
      severity: "high",
      summary: `${failures.length} unresolved flow failures on ${milestoneId} need one recovery fix`,
      evidence,
      suggestedFix:
        "Fix the shared milestone-flow failure instead of filing one item per failed unit. Use the flow audit evidence to repair stale dispatch, missing summary, runaway, or child-process handling.",
      acceptanceCriteria:
        "AC1: flow audit reports the active milestone/unit and session pointer. AC2: stale dispatched unit with no progress is flagged. AC3: runaway history and child-process hang evidence are preserved. AC4: repeated same-milestone failures stay deduplicated into one open item.",
      source: "detector",
      occurredIn: { milestone: milestoneId, unitType: "flow-audit" },
    },
    basePath,
  );
  if (!recorded) return undefined;
  return {
    filed: true,
    milestoneId,
    count: failures.length,
    entryId: recorded.entry.id,
  };
}
/**
 * Pick the single highest-priority recommended action, in order: stale
 * dispatched unit, over-budget optional child that was not killed, recent
 * errors, active milestone without an active unit, otherwise nothing to do.
 */
function chooseRecommendedAction(args) {
  const [staleUnit] = args.staleDispatchedUnits;
  if (staleUnit) {
    let session = "";
    if (args.sessionPointer?.sessionFile) {
      session = ` ${args.sessionPointer.sessionFile}`;
    } else if (args.sessionPointer?.sessionId) {
      session = ` ${args.sessionPointer.sessionId}`;
    }
    return `Inspect session${session} for ${staleUnit.unitType} ${staleUnit.unitId}; if no new output exists, stop/requeue the stale dispatched unit before continuing.`;
  }
  const runawayChild = args.childProcesses.find(
    (child) => child.nonBlocking && child.overBudget && !child.killed,
  );
  if (runawayChild) {
    return `Optional ${runawayChild.classification} child pid ${runawayChild.pid} is over budget; it is non-blocking, or rerun with --kill-children to terminate it.`;
  }
  if (args.lastErrors.length > 0) {
    return "Review recent errors before dispatching another unit.";
  }
  if (args.activeMilestone && !args.activeUnit) {
    return `Dispatch or resume the next unit for ${args.activeMilestone.id}.`;
  }
  return "No flow-auditor action needed.";
}
/**
 * Run a flow audit: inspect active unit state, auto.lock, runtime artifacts,
 * and child processes to diagnose stuck milestones without human forensic work.
 *
 * Purpose: satisfy AC1 of sf-moocz9so-4ffov2 — a command that prints active
 * milestone/unit, progress age, session pointer, child processes, last errors,
 * and recommended action.
 *
 * Consumer: `/sf doctor flow` command and session_start startup health sweep.
 *
 * @param basePath - project root containing the .sf tree
 * @param options - overrides for time thresholds, injected ps output, kill
 *   behavior, and self-feedback recording (used heavily by tests)
 * @returns audit report; `ok` is true only when there are no warnings, no
 *   recent errors, and no stale dispatched units
 */
export async function runFlowAudit(basePath, options = {}) {
  const nowMs = options.nowMs ?? Date.now();
  const staleProgressMs = options.staleProgressMs ?? DEFAULT_STALE_PROGRESS_MS;
  const optionalChildBudgetMs =
    options.optionalChildBudgetMs ?? DEFAULT_OPTIONAL_CHILD_BUDGET_MS;
  const runtimeRoot = sfRoot(basePath);
  const warnings = [];
  const recommendations = [];
  const childProcesses = [];
  const lastErrors = readRecentErrors(runtimeRoot, {
    nowMs,
    maxAgeMs: options.recentErrorMaxAgeMs ?? DEFAULT_RECENT_ERROR_MAX_AGE_MS,
  });
  const staleDispatchedUnits = [];
  let sessionPointer;
  let activeMilestone;
  const autoLockPath = join(runtimeRoot, "auto.lock");
  let activeUnit;
  let activePid;
  // Step 1: auto.lock names the unit the automation believes is running,
  // plus (optionally) its pid and session pointer.
  const lockData = readJsonFile(autoLockPath);
  if (lockData) {
    if (lockData.unitType && lockData.unitId) {
      const startedAtMs = parseEpochMs(lockData.startedAt, nowMs);
      const parsed = parseUnitId(lockData.unitId);
      activeMilestone = { id: parsed.milestone };
      activePid =
        typeof lockData.pid === "number" && Number.isFinite(lockData.pid)
          ? lockData.pid
          : undefined;
      activeUnit = {
        unitType: lockData.unitType,
        unitId: lockData.unitId,
        phase: lockData.phase ?? "unknown",
        startedAt: formatIso(startedAtMs) ?? new Date(nowMs).toISOString(),
        ageMs: Math.max(0, nowMs - startedAtMs),
        // Progress age starts equal to the unit age; refined below from the
        // matching runtime unit record when one exists.
        progressAgeMs: Math.max(0, nowMs - startedAtMs),
      };
      if (lockData.sessionId || lockData.sessionFile) {
        sessionPointer = {
          sessionId: lockData.sessionId,
          sessionFile: lockData.sessionFile,
          source: "auto.lock",
        };
      }
    }
  } else if (existsSync(autoLockPath)) {
    // The lock file exists but is unreadable or not valid JSON.
    warnings.push("Could not parse .sf/auto.lock");
  }
  // Step 2: runtime unit records — detect stale dispatched units and refine
  // the active unit's progress and session data.
  const runtimeUnits = readRuntimeUnits(join(runtimeRoot, "runtime", "units"));
  let dispatchedCount = 0;
  for (const unit of runtimeUnits) {
    if (unit.phase === "dispatched") dispatchedCount++;
    if (!unit.unitType || !unit.unitId) continue;
    const progressBaseMs = parseEpochMs(
      unit.lastProgressAt ?? unit.updatedAt ?? unit.startedAt,
      nowMs,
    );
    const progressAgeMs = Math.max(0, nowMs - progressBaseMs);
    const lastProgressAt = formatIso(progressBaseMs);
    const stale =
      unit.phase === "dispatched" && progressAgeMs > staleProgressMs;
    if (stale) {
      // False-positive guard: if the expected artifact already exists, the unit
      // completed successfully but its runtime record was not cleared (#sf-moqv5o7h-vaabu6).
      const parsed = parseUnitId(unit.unitId);
      let artifactExists = false;
      if (
        unit.unitType === "complete-slice" &&
        parsed.milestone &&
        parsed.slice
      ) {
        artifactExists = !!resolveSliceFile(
          basePath,
          parsed.milestone,
          parsed.slice,
          "SUMMARY",
        );
      } else if (
        unit.unitType === "execute-task" &&
        parsed.milestone &&
        parsed.slice &&
        parsed.task
      ) {
        artifactExists = !!resolveTaskFile(
          basePath,
          parsed.milestone,
          parsed.slice,
          parsed.task,
          "SUMMARY",
        );
      } else if (unit.unitType === "complete-milestone" && parsed.milestone) {
        artifactExists = !!resolveMilestoneFile(
          basePath,
          parsed.milestone,
          "SUMMARY",
        );
      } else if (
        (unit.unitType === "plan-slice" || unit.unitType === "replan-slice") &&
        parsed.milestone &&
        parsed.slice
      ) {
        artifactExists = !!resolveSliceFile(
          basePath,
          parsed.milestone,
          parsed.slice,
          "PLAN",
        );
      } else if (unit.unitType === "plan-milestone" && parsed.milestone) {
        artifactExists = !!resolveMilestoneFile(
          basePath,
          parsed.milestone,
          "ROADMAP",
        );
      }
      if (!artifactExists) {
        staleDispatchedUnits.push({
          unitType: unit.unitType,
          unitId: unit.unitId,
          phase: unit.phase ?? "unknown",
          progressAgeMs,
          lastProgressAt,
        });
        warnings.push(
          `Unit ${unit.unitId} has no progress for ${minutes(progressAgeMs)} minutes (phase=${unit.phase}).`,
        );
      }
    }
    // The runtime record for the active unit carries fresher phase/progress
    // data than auto.lock — prefer it.
    if (
      activeUnit &&
      unit.unitType === activeUnit.unitType &&
      unit.unitId === activeUnit.unitId
    ) {
      activeUnit.phase = unit.phase ?? activeUnit.phase;
      activeUnit.progressAgeMs = progressAgeMs;
      activeUnit.lastProgressAt = lastProgressAt;
      if (!sessionPointer && (unit.sessionId || unit.sessionFile)) {
        sessionPointer = {
          sessionId: unit.sessionId,
          sessionFile: unit.sessionFile,
          source: "runtime-unit",
        };
      }
    }
  }
  if (dispatchedCount > 1) {
    warnings.push(
      `${dispatchedCount} units are in dispatched phase simultaneously.`,
    );
  }
  // Step 3: child processes — classify, report, and optionally kill
  // over-budget optional (non-blocking) children.
  const psRows = await readPsRows(options);
  for (const row of psRows) {
    const classification = classifyProcess(row, psRows);
    if (!shouldIncludeProcess(row, classification, activePid)) continue;
    const nonBlocking = isOptionalChild(classification);
    const overBudget =
      nonBlocking &&
      row.ageMs !== undefined &&
      row.ageMs > optionalChildBudgetMs;
    let action = nonBlocking ? "non-blocking" : "observe";
    let killed = false;
    let killError;
    if (overBudget) {
      if (options.killOverBudgetChildren) {
        action = "kill";
        try {
          // options.killProcess is an injectable hook for tests.
          if (options.killProcess) options.killProcess(row.pid);
          else process.kill(row.pid, "SIGTERM");
          killed = true;
        } catch (err) {
          killError = err instanceof Error ? err.message : String(err);
          warnings.push(
            `Failed to kill over-budget ${classification} child pid ${row.pid}: ${killError}`,
          );
        }
      } else {
        warnings.push(
          `${classification} child pid ${row.pid} is over budget (${minutes(row.ageMs ?? 0)} minutes).`,
        );
      }
    }
    childProcesses.push({
      pid: row.pid,
      ppid: row.ppid,
      cmd: row.cmd,
      classification,
      ageMs: row.ageMs,
      nonBlocking,
      overBudget,
      action,
      killed: killed || undefined,
      killError,
    });
  }
  // Step 4: derived state supplies milestone title/phase context and an
  // idle-milestone recommendation.
  try {
    const state = await deriveState(basePath);
    if (state.activeMilestone) {
      activeMilestone = {
        id: state.activeMilestone.id,
        title: state.activeMilestone.title,
        phase: state.phase,
      };
    }
    if (state.activeMilestone && !activeUnit) {
      recommendations.push(
        `No active unit detected, but milestone ${state.activeMilestone.id} is active. Consider dispatching the next unit.`,
      );
    }
  } catch {
    // State derivation is useful context but not required for the audit.
  }
  // Step 5: loop evidence, runaway history, and the repeated-failure rollup.
  const loopEvidence =
    activeUnit &&
    buildLoopEvidence(basePath, activeUnit.unitType, activeUnit.unitId);
  if (
    loopEvidence?.completedPriorTasks.length &&
    loopEvidence.missingSummaries.length
  ) {
    warnings.push(
      `${loopEvidence.milestoneId}/${loopEvidence.sliceId} has ${loopEvidence.completedPriorTasks.length} completed prior tasks but missing final summary evidence for ${loopEvidence.missingSummaries.join(", ")}.`,
    );
  }
  const feedback = readAllSelfFeedback(basePath);
  const milestoneId = activeMilestone?.id;
  const runawayHistory = collectRunawayHistory(
    runtimeUnits,
    feedback,
    milestoneId,
  );
  const repeatedFailureRollup = maybeRecordRepeatedFailureRollup(
    basePath,
    milestoneId,
    feedback,
    options,
  );
  if (repeatedFailureRollup?.filed) {
    recommendations.push(
      `Filed ${FLOW_AUDIT_ROLLUP_KIND} for ${milestoneId} after ${repeatedFailureRollup.count} repeated failures.`,
    );
  }
  // Step 6: the single recommended action always leads the recommendations.
  const recommendedAction = chooseRecommendedAction({
    activeUnit,
    sessionPointer,
    staleDispatchedUnits,
    childProcesses,
    lastErrors,
    activeMilestone,
  });
  if (!recommendations.includes(recommendedAction)) {
    recommendations.unshift(recommendedAction);
  }
  return {
    ok:
      warnings.length === 0 &&
      lastErrors.length === 0 &&
      staleDispatchedUnits.length === 0,
    activeMilestone,
    activeUnit,
    sessionPointer,
    recommendations,
    recommendedAction,
    warnings,
    childProcesses,
    lastErrors,
    staleDispatchedUnits,
    runawayHistory,
    loopEvidence,
    repeatedFailureRollup,
  };
}
export {
formatEnvironmentReport,
runEnvironmentChecks,
runFullEnvironmentChecks,
} from "./doctor-environment.js";
export {
filterDoctorIssues,
formatDoctorIssuesForPrompt,
formatDoctorReport,
formatDoctorReportJson,
summarizeDoctorIssues,
} from "./doctor-format.js";
export {
computeProgressScore,
computeProgressScoreWithContext,
formatProgressLine,
formatProgressReport,
} from "./progress-score.js";
/**
 * Delimiter characters that SF state documents reserve and that therefore
 * must not appear in milestone or slice titles:
 *
 * - em dash (U+2014) and en dash (U+2013): STATE.md and related documents use
 *   the em dash as a display separator, so a dash inside a title makes that
 *   separator ambiguous and corrupts what the agent reads back; the en dash
 *   is banned for being visually close enough to cause the same confusion.
 * - forward slash (U+002F): the path separator in unit IDs (M001/S01) and
 *   git branch names (sf/M001/S01); a slash in a title can break path
 *   resolution.
 */
const TITLE_DELIMITER_RE = /[\u2014\u2013/]/; // em dash, en dash, forward slash
/**
 * Check a milestone/slice title for reserved SF delimiter characters.
 *
 * @param title - the milestone or slice title to validate
 * @returns a human-readable error naming the offending character class(es),
 *   or null when the title is safe
 */
export function validateTitle(title) {
  if (!TITLE_DELIMITER_RE.test(title)) return null;
  const offenders = [];
  if (/[\u2014\u2013]/.test(title)) {
    offenders.push("em/en dash (\u2014 or \u2013)");
  }
  if (title.includes("/")) offenders.push("forward slash (/)");
  return `title contains ${offenders.join(" and ")}, which conflict with SF state document delimiters`;
}
/**
 * Validate the structural shape of an SF preferences object.
 *
 * Only types are checked (list fields are lists, skill_rules entries are
 * objects with a string `when` and optional list use/prefer/avoid keys);
 * unknown keys and semantics are not inspected.
 *
 * @returns a list of human-readable problems; empty when the shape is valid
 */
function validatePreferenceShape(preferences) {
  const problems = [];
  const LIST_FIELDS = [
    "always_use_skills",
    "prefer_skills",
    "avoid_skills",
    "custom_instructions",
  ];
  for (const field of LIST_FIELDS) {
    const value = preferences[field];
    if (value === undefined || Array.isArray(value)) continue;
    problems.push(`${field} must be a list`);
  }
  const rules = preferences.skill_rules;
  if (rules === undefined) return problems;
  if (!Array.isArray(rules)) {
    problems.push("skill_rules must be a list");
    return problems;
  }
  rules.forEach((rule, index) => {
    if (!rule || typeof rule !== "object") {
      problems.push(`skill_rules[${index}] must be an object`);
      return;
    }
    if (typeof rule.when !== "string") {
      problems.push(`skill_rules[${index}].when must be a string`);
    }
    for (const key of ["use", "prefer", "avoid"]) {
      const value = rule[key];
      if (value !== undefined && !Array.isArray(value)) {
        problems.push(`skill_rules[${index}].${key} must be a list`);
      }
    }
  });
  return problems;
}
/**
 * Build STATE.md markdown from derived project state.
 *
 * Sections, in order: active milestone/slice, phase, optional requirements
 * status line, milestone registry (one status glyph per entry), recent
 * decisions, blockers, and next action. Exported for pre-dispatch rebuild
 * (#3475).
 */
export function buildStateMarkdown(state) {
  const describe = (unit) => (unit ? `${unit.id}: ${unit.title}` : "None");
  const glyphFor = (status) => {
    switch (status) {
      case "complete":
        return "\u2705";
      case "active":
        return "\uD83D\uDD04";
      case "parked":
        return "\u23F8\uFE0F";
      default:
        return "\u2B1C";
    }
  };
  const out = ["# SF State", ""];
  out.push(`**Active Milestone:** ${describe(state.activeMilestone)}`);
  out.push(`**Active Slice:** ${describe(state.activeSlice)}`);
  out.push(`**Phase:** ${state.phase}`);
  if (state.requirements) {
    const r = state.requirements;
    out.push(
      `**Requirements Status:** ${r.active} active \u00b7 ${r.validated} validated \u00b7 ${r.deferred} deferred \u00b7 ${r.outOfScope} out of scope`,
    );
  }
  out.push("", "## Milestone Registry");
  for (const entry of state.registry) {
    out.push(`- ${glyphFor(entry.status)} **${entry.id}:** ${entry.title}`);
  }
  out.push("", "## Recent Decisions");
  if (state.recentDecisions.length === 0) {
    out.push("- None recorded");
  } else {
    for (const decision of state.recentDecisions) out.push(`- ${decision}`);
  }
  out.push("", "## Blockers");
  if (state.blockers.length === 0) {
    out.push("- None");
  } else {
    for (const blocker of state.blockers) out.push(`- ${blocker}`);
  }
  out.push("", "## Next Action", state.nextAction || "None", "");
  return out.join("\n");
}
/** Re-derive project state, rewrite STATE.md, and record the applied fix. */
async function updateStateFile(basePath, fixesApplied) {
  const derived = await deriveState(basePath);
  const statePath = resolveSfRootFile(basePath, "STATE");
  await saveFile(statePath, buildStateMarkdown(derived));
  fixesApplied.push(`updated ${statePath}`);
}
/**
 * Rebuild STATE.md from current disk state.
 *
 * Drops all caches first so the derivation reflects the live
 * milestone/slice/task directories, then rewrites STATE.md. Called from
 * auto-mode post-hooks and doctor recovery paths.
 */
export async function rebuildState(basePath) {
  invalidateAllCaches();
  const derived = await deriveState(basePath);
  const statePath = resolveSfRootFile(basePath, "STATE");
  await saveFile(statePath, buildStateMarkdown(derived));
}
/** True when unitId equals the scope or is nested beneath it (no scope = all). */
function matchesScope(unitId, scope) {
  if (!scope) return true;
  if (unitId === scope) return true;
  return unitId.startsWith(`${scope}/`);
}
/**
 * Decide whether expensive per-milestone doctor checks should run: with an
 * explicit scope, when the scope touches this milestone in either direction
 * (milestone within scope, or scope within milestone); otherwise only for
 * the active milestone.
 */
function shouldRunDeepMilestoneDoctorChecks(milestoneId, state, scope) {
  if (!scope) return milestoneId === state.activeMilestone?.id;
  if (matchesScope(milestoneId, scope)) return true;
  return scope.startsWith(`${milestoneId}/`);
}
/**
 * True when a slice directory entry like "S01-some-slug" coexists with a
 * bare "S01" directory — the slugged form is a legacy duplicate.
 */
function isLegacySlugDuplicateSliceDir(slicesDir, entry) {
  const slugged = entry.match(/^(S\d+)-.+$/);
  if (!slugged) return false;
  return existsSync(join(slicesDir, slugged[1]));
}
/**
 * Audit REQUIREMENTS.md content for per-requirement hygiene problems.
 *
 * Splits the document on `### R<nnn>` headings and flags: active
 * requirements without an owning slice (warning, only when
 * options.includeOwnerWarnings is set — see #4414: a freshly created
 * requirement legitimately has no owner until planning wires one in), and
 * blocked requirements with no reason recorded in Notes.
 */
function auditRequirements(content, options = {}) {
  if (!content) return [];
  const issues = [];
  const extractField = (block, pattern) =>
    block.match(pattern)?.[1]?.trim().toLowerCase() ?? "";
  for (const block of content.split(/^###\s+/m).slice(1)) {
    const requirementId = block.match(/^(R\d+)/)?.[1];
    if (!requirementId) continue;
    const status = extractField(block, /^-\s+Status:\s+(.+)$/m);
    const owner = extractField(block, /^-\s+Primary owning slice:\s+(.+)$/m);
    const notes = extractField(block, /^-\s+Notes:\s+(.+)$/m);
    const ownerMissing = !owner || owner === "none" || owner === "none yet";
    if (status === "active" && ownerMissing) {
      if (!options.includeOwnerWarnings) continue;
      // #4414: warning, not error — only persistence past milestone
      // completion is a real failure, and other audits cover that.
      issues.push({
        severity: "warning",
        code: "active_requirement_missing_owner",
        scope: "project",
        unitId: requirementId,
        message: `${requirementId} is Active but has no primary owning slice`,
        file: relSfRootFile("REQUIREMENTS"),
        fixable: false,
      });
    }
    if (status === "blocked" && !notes) {
      issues.push({
        severity: "warning",
        code: "blocked_requirement_missing_reason",
        scope: "project",
        unitId: requirementId,
        message: `${requirementId} is Blocked but has no reason in Notes`,
        file: relSfRootFile("REQUIREMENTS"),
        fixable: false,
      });
    }
  }
  return issues;
}
/**
 * Select the doctor scope (milestone or milestone/slice).
 *
 * The requested scope always wins. Otherwise: the active milestone/slice if
 * one exists, else the first milestone whose roadmap is not yet complete,
 * else the first registered milestone, else undefined when the project has
 * no milestones directory at all.
 *
 * @param requestedScope — user-requested scope; takes precedence if provided
 * @returns scope ID (e.g., "M001" or "M001/S01") or undefined
 */
export async function selectDoctorScope(basePath, requestedScope) {
  if (requestedScope) return requestedScope;
  const state = await deriveState(basePath);
  const activeMilestoneId = state.activeMilestone?.id;
  if (activeMilestoneId) {
    const activeSliceId = state.activeSlice?.id;
    return activeSliceId
      ? `${activeMilestoneId}/${activeSliceId}`
      : activeMilestoneId;
  }
  if (!existsSync(milestonesDir(basePath))) return undefined;
  for (const { id } of state.registry) {
    const roadmapPath = resolveMilestoneFile(basePath, id, "ROADMAP");
    if (!roadmapPath) continue;
    const roadmapContent = await loadFile(roadmapPath);
    if (!roadmapContent) continue;
    // Prefer DB slice statuses when available; fall back to parsing the
    // roadmap markdown.
    let complete;
    if (isDbAvailable()) {
      const dbSlices = getMilestoneSlices(id);
      complete =
        dbSlices.length > 0 && dbSlices.every((s) => s.status === "complete");
    } else {
      complete = isMilestoneComplete(parseRoadmap(roadmapContent));
    }
    if (!complete) return id;
  }
  return state.registry[0]?.id;
}
// ── Helper: circular dependency detection ──────────────────────────────────
function detectCircularDependencies(slices) {
  // Adjacency restricted to dependencies that name a known slice; unknown
  // IDs are reported separately (unresolvable_dependency) and ignored here.
  const knownIds = new Set(slices.map((slice) => slice.id));
  const edges = new Map(
    slices.map((slice) => [
      slice.id,
      slice.depends.filter((dep) => knownIds.has(dep)),
    ]),
  );
  const visiting = new Set();
  const finished = new Set();
  const cycles = [];
  // Depth-first walk carrying the trail of IDs from the root; hitting a node
  // that is still on the stack closes a cycle starting at that node.
  const walk = (node, trail) => {
    if (finished.has(node)) return;
    if (visiting.has(node)) {
      cycles.push([...trail.slice(trail.indexOf(node)), node]);
      return;
    }
    visiting.add(node);
    for (const next of edges.get(node) ?? []) walk(next, [...trail, node]);
    finished.add(node);
  };
  for (const slice of slices) {
    if (!finished.has(slice.id) && !visiting.has(slice.id)) walk(slice.id, []);
  }
  return cycles;
}
async function appendDoctorHistory(basePath, report) {
  try {
    const historyPath = join(sfRoot(basePath), "doctor-history.jsonl");
    // Errors and warnings, preserving original report order.
    const flagged = report.issues.filter(
      (issue) => issue.severity === "error" || issue.severity === "warning",
    );
    const errorCount = flagged.filter(
      (issue) => issue.severity === "error",
    ).length;
    const warningCount = flagged.length - errorCount;
    // Cap the detail list to keep each JSONL line bounded.
    const issueDetails = flagged
      .slice(0, 10)
      .map(({ severity, code, message, unitId }) => ({
        severity,
        code,
        message,
        unitId,
      }));
    // Human-readable one-line summary.
    const summaryParts = [];
    if (report.ok) {
      summaryParts.push("Clean");
    } else {
      const counts = [];
      if (errorCount > 0) {
        counts.push(`${errorCount} error${errorCount > 1 ? "s" : ""}`);
      }
      if (warningCount > 0) {
        counts.push(`${warningCount} warning${warningCount > 1 ? "s" : ""}`);
      }
      summaryParts.push(counts.join(", "));
    }
    if (report.fixesApplied.length > 0) {
      summaryParts.push(`${report.fixesApplied.length} fixed`);
    }
    if (issueDetails.length > 0) {
      // Lead with the first error when one exists, else the first warning.
      const topIssue =
        issueDetails.find((issue) => issue.severity === "error") ??
        issueDetails[0];
      summaryParts.push(topIssue.message);
    }
    const entry = JSON.stringify({
      schemaVersion: DOCTOR_HISTORY_SCHEMA_VERSION,
      ts: new Date().toISOString(),
      ok: report.ok,
      errors: errorCount,
      warnings: warningCount,
      fixes: report.fixesApplied.length,
      codes: [...new Set(report.issues.map((issue) => issue.code))],
      issues: issueDetails.length > 0 ? issueDetails : undefined,
      fixDescriptions:
        report.fixesApplied.length > 0 ? report.fixesApplied : undefined,
      scope: report.scope,
      summary: summaryParts.join(" · "),
    });
    const existing = existsSync(historyPath)
      ? readFileSync(historyPath, "utf-8")
      : "";
    await saveFile(historyPath, `${existing}${entry}\n`);
  } catch {
    /* history logging is best-effort — never fail the doctor run */
  }
}
/**
 * Read the last N doctor history entries from the log.
 *
 * Entries come back most-recent-first. A missing or unreadable history file
 * yields an empty array, and malformed JSONL lines are skipped — history is
 * advisory, never load-bearing.
 *
 * @param lastN — number of entries to return (default 50)
 * @returns history entries, most-recent first
 */
export async function readDoctorHistory(basePath, lastN = 50) {
  try {
    const historyPath = join(sfRoot(basePath), "doctor-history.jsonl");
    if (!existsSync(historyPath)) return [];
    const nonEmptyLines = readFileSync(historyPath, "utf-8")
      .split("\n")
      .filter((line) => line.trim());
    const entries = [];
    for (const line of nonEmptyLines.slice(-lastN).reverse()) {
      try {
        const entry = normalizeDoctorHistoryEntry(JSON.parse(line));
        if (entry) entries.push(entry);
      } catch {
        // Skip lines that fail to parse rather than failing the whole read.
      }
    }
    return entries;
  } catch {
    return [];
  }
}
function normalizeDoctorHistoryEntry(entry) {
  // Reject anything that is not a plain object (null, arrays, primitives).
  const isRecord =
    entry !== null && typeof entry === "object" && !Array.isArray(entry);
  if (!isRecord) return null;
  // Pre-versioning entries carry no schemaVersion; treat them as current.
  // Entries written under a different schema version are dropped entirely.
  const schemaVersion = entry.schemaVersion ?? DOCTOR_HISTORY_SCHEMA_VERSION;
  if (schemaVersion !== DOCTOR_HISTORY_SCHEMA_VERSION) return null;
  return { ...entry, schemaVersion };
}
/**
 * Run the SF doctor health check suite across git, runtime, environment, and state layers.
 *
 * Scans for structural issues (orphaned state, circular dependencies, stale locks,
 * missing files), environment problems (dependencies, tools, ports), and state corruption.
 * Can auto-fix mechanical issues (task-level only, never deletes global state unless fixLevel="all").
 * Records history and returns detailed report.
 *
 * @param options — fix=true enables auto-fixes; dryRun=true reports fixable issues
 *   without touching disk; fixLevel="task" restricts auto-fix to non-global state,
 *   "all" is unrestricted
 * @returns comprehensive report with issues, fixes applied, and per-domain timing
 */
export async function runSFDoctor(basePath, options) {
  const issues = [];
  const fixesApplied = [];
  const fix = options?.fix === true;
  const dryRun = options?.dryRun === true;
  const fixLevel = options?.fixLevel ?? "all";
  // Issue codes that represent completion state transitions — creating summary
  // stubs, marking slices/milestones done in the roadmap. These belong to the
  // dispatch lifecycle (complete-slice, complete-milestone units), not to
  // mechanical post-hook bookkeeping. When fixLevel is "task", these are
  // detected and reported but never auto-fixed.
  /** Whether a given issue code should be auto-fixed at the current fixLevel. */
  const shouldFix = (code) => {
    if (!fix || dryRun) return false;
    if (fixLevel === "task" && GLOBAL_STATE_CODES.has(code)) return false;
    return true;
  };
  const prefs = loadEffectiveSFPreferences();
  if (prefs) {
    const prefIssues = validatePreferenceShape(prefs.preferences);
    for (const issue of prefIssues) {
      issues.push({
        severity: "warning",
        code: "invalid_preferences",
        scope: "project",
        unitId: "project",
        message: `SF preferences invalid: ${issue}`,
        file: prefs.path,
        fixable: false,
      });
    }
  }
  checkGeneratedArtifactResidue(basePath, issues, fixesApplied, shouldFix);
  checkSfFormSyntax(basePath, issues, fixesApplied, shouldFix);
  // Git health checks — timed
  const t0git = Date.now();
  const isolationMode =
    options?.isolationMode ??
    (prefs?.preferences?.git?.isolation === "worktree"
      ? "worktree"
      : prefs?.preferences?.git?.isolation === "branch"
        ? "branch"
        : "none");
  await checkGitHealth(
    basePath,
    issues,
    fixesApplied,
    shouldFix,
    isolationMode,
  );
  const gitMs = Date.now() - t0git;
  // Runtime health checks — timed
  const t0runtime = Date.now();
  await checkRuntimeHealth(basePath, issues, fixesApplied, shouldFix);
  const runtimeMs = Date.now() - t0runtime;
  // Global health checks — cross-project state (e.g. orphaned project state dirs)
  await checkGlobalHealth(issues, fixesApplied, shouldFix);
  // Environment health checks — timed
  const t0env = Date.now();
  await checkEnvironmentHealth(basePath, issues, {
    includeRemote: !options?.scope,
    includeBuild: options?.includeBuild,
    includeTests: options?.includeTests,
    shouldFix,
    fixesApplied,
  });
  const envMs = Date.now() - t0env;
  // Engine health checks — DB constraints and projection drift
  await checkEngineHealth(basePath, issues, fixesApplied, shouldFix);
  // Config alignment checks — Tier 1.4 config schema validation
  await checkConfigHealth(issues, fixesApplied, shouldFix);
  // Vault setup checks — Tier 1.1 vault secret resolver
  checkVaultHealth(issues, shouldFix);
  // Singularity Memory checks — Tier 1.2 optional federation
  await checkSmHealth(issues, shouldFix);
  // Turn status markers — Tier 2.5 agent semantic state signaling
  await checkTurnStatusHealth(issues, shouldFix);
  const milestonesPath = milestonesDir(basePath);
  if (!existsSync(milestonesPath)) {
    // No milestones yet — return a report covering only the infra checks above.
    const report = {
      ok: issues.every((i) => i.severity !== "error"),
      basePath,
      issues,
      fixesApplied,
      timing: {
        git: gitMs,
        runtime: runtimeMs,
        environment: envMs,
        sfState: 0,
      },
    };
    await appendDoctorHistory(basePath, report);
    return report;
  }
  const requirementsPath = resolveSfRootFile(basePath, "REQUIREMENTS");
  const requirementsContent = await loadFile(requirementsPath);
  const t0state = Date.now();
  const state = await deriveState(basePath);
  issues.push(
    ...auditRequirements(requirementsContent, {
      includeOwnerWarnings: Boolean(options?.scope),
    }),
  );
  // Provider / auth health checks — only relevant when there is active work to dispatch.
  // Skipped for idle projects (no active milestone) to avoid noise in environments
  // where CI/test runners have no API key configured.
  if (state.activeMilestone) {
    try {
      const providerResults = runProviderChecks();
      for (const result of providerResults) {
        if (!result.required) continue;
        // NOTE(review): detail is appended to message with no separator in
        // both branches below — confirm the intended formatting upstream.
        if (result.status === "error") {
          issues.push({
            severity: "warning",
            code: "provider_key_missing",
            scope: "project",
            unitId: "project",
            message:
              result.message + (result.detail ? `${result.detail}` : ""),
            fixable: false,
          });
        } else if (result.status === "warning") {
          issues.push({
            severity: "warning",
            code: "provider_key_backedoff",
            scope: "project",
            unitId: "project",
            message:
              result.message + (result.detail ? `${result.detail}` : ""),
            fixable: false,
          });
        }
      }
    } catch {
      // Non-fatal — provider check failure should not block other checks
    }
  }
  for (const milestone of state.registry) {
    const milestoneId = milestone.id;
    const milestonePath = resolveMilestonePath(basePath, milestoneId);
    if (!milestonePath) continue;
    const runDeepChecks = shouldRunDeepMilestoneDoctorChecks(
      milestoneId,
      state,
      options?.scope,
    );
    if (!runDeepChecks) continue;
    // Validate milestone title for delimiter characters that break state documents.
    const milestoneTitleIssue = validateTitle(milestone.title);
    if (milestoneTitleIssue) {
      const roadmapFile = resolveMilestoneFile(
        basePath,
        milestoneId,
        "ROADMAP",
      );
      let wasFixed = false;
      if (shouldFix("delimiter_in_title") && roadmapFile) {
        try {
          const raw = readFileSync(roadmapFile, "utf-8");
          // Replace em/en dashes with " - " in the H1 title line only
          const sanitized = raw.replace(/^(# .*)$/m, (line) =>
            line.replace(/[\u2014\u2013]/g, "-"),
          );
          if (sanitized !== raw) {
            await saveFile(roadmapFile, sanitized);
            fixesApplied.push(
              `sanitized delimiter characters in ${milestoneId} title`,
            );
            wasFixed = true;
          }
        } catch {
          /* non-fatal — report the warning below */
        }
      }
      if (!wasFixed) {
        issues.push({
          severity: "warning",
          code: "delimiter_in_title",
          scope: "milestone",
          unitId: milestoneId,
          message: `Milestone ${milestoneId} ${milestoneTitleIssue}. Rename the milestone to remove these characters to prevent state corruption.`,
          file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
          fixable: true,
        });
      }
    }
    const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
    const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
    if (!roadmapContent) continue;
    let slices;
    if (isDbAvailable()) {
      const dbSlices = getMilestoneSlices(milestoneId);
      slices = dbSlices.map((s) => ({
        id: s.id,
        title: s.title,
        done: isClosedStatus(s.status),
        pending: s.status === "pending",
        skipped: s.status === "skipped",
        risk: s.risk || "medium",
        depends: s.depends,
        demo: s.demo,
      }));
    } else {
      const activeMilestoneId = state.activeMilestone?.id;
      const activeSliceId = state.activeSlice?.id;
      slices = parseRoadmap(roadmapContent).slices.map((s) => ({
        ...s,
        // Legacy roadmaps only encode done vs not-done. For doctor's
        // missing-directory checks, treat every undone slice except the
        // current active slice as effectively pending/unstarted.
        pending:
          !s.done &&
          (milestoneId !== activeMilestoneId || s.id !== activeSliceId),
      }));
    }
    // Wrap in Roadmap-compatible shape for detectCircularDependencies
    const roadmap = { slices };
    // ── Circular dependency detection ──────────────────────────────────────
    for (const cycle of detectCircularDependencies(roadmap.slices)) {
      issues.push({
        severity: "error",
        code: "circular_slice_dependency",
        scope: "milestone",
        unitId: milestoneId,
        message: `Circular dependency detected: ${cycle.join(" → ")}`,
        file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
        fixable: false,
      });
    }
    // ── Orphaned slice directories ─────────────────────────────────────────
    try {
      const slicesDir = join(milestonePath, "slices");
      if (existsSync(slicesDir)) {
        const knownSliceIds = new Set(roadmap.slices.map((s) => s.id));
        for (const entry of readdirSync(slicesDir)) {
          try {
            if (!lstatSync(join(slicesDir, entry)).isDirectory()) continue;
          } catch {
            continue;
          }
          if (!knownSliceIds.has(entry)) {
            if (isLegacySlugDuplicateSliceDir(slicesDir, entry)) continue;
            issues.push({
              severity: "warning",
              code: "orphaned_slice_directory",
              scope: "milestone",
              unitId: milestoneId,
              message: `Directory "${entry}" exists in ${milestoneId}/slices/ but is not referenced in the roadmap`,
              file: `${relMilestonePath(basePath, milestoneId)}/slices/${entry}`,
              fixable: false,
            });
          }
        }
      }
    } catch {
      /* non-fatal */
    }
    for (const slice of roadmap.slices) {
      const unitId = `${milestoneId}/${slice.id}`;
      if (
        options?.scope &&
        !matchesScope(unitId, options.scope) &&
        options.scope !== milestoneId
      )
        continue;
      // Validate slice title for delimiter characters.
      const sliceTitleIssue = validateTitle(slice.title);
      if (sliceTitleIssue) {
        // Slice titles live inside the roadmap H1/checkbox lines — the milestone-level
        // fix above already sanitizes the roadmap file. For slices we only report, because
        // the title comes from the checkbox text and requires careful regex to fix safely.
        issues.push({
          severity: "warning",
          code: "delimiter_in_title",
          scope: "slice",
          unitId,
          message: `Slice ${unitId} ${sliceTitleIssue}. Rename the slice to remove these characters to prevent state corruption.`,
          file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
          fixable: false,
        });
      }
      // Check for unresolvable dependency IDs
      const knownSliceIds = new Set(roadmap.slices.map((s) => s.id));
      for (const dep of slice.depends) {
        if (!knownSliceIds.has(dep)) {
          issues.push({
            severity: "warning",
            code: "unresolvable_dependency",
            scope: "slice",
            unitId,
            message: `Slice ${unitId} depends on "${dep}" which is not a slice ID in this roadmap. This permanently blocks the slice. Use comma-separated IDs: \`depends:[S01,S02]\``,
            file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
            fixable: false,
          });
        }
      }
      const slicePath = resolveSlicePath(basePath, milestoneId, slice.id);
      if (!slicePath) {
        // Pending slices haven't been planned yet — directories are created
        // lazily by ensurePreconditions() at dispatch time. Skipped slices are
        // intentionally allowed to remain summary-less and directory-less.
        if (slice.pending || slice.skipped) continue;
        const expectedPath = relSlicePath(basePath, milestoneId, slice.id);
        issues.push({
          severity: slice.done ? "warning" : "error",
          code: "missing_slice_dir",
          scope: "slice",
          unitId,
          message: slice.done
            ? `Missing slice directory for ${unitId} (slice is complete — cosmetic only)`
            : `Missing slice directory for ${unitId}`,
          file: expectedPath,
          fixable: true,
        });
        // Gate on shouldFix so dryRun never mutates disk and fixLevel gating
        // is respected (previously gated on bare `fix`, which created the
        // directory even during dry runs).
        if (shouldFix("missing_slice_dir")) {
          const absoluteSliceDir = join(milestonePath, "slices", slice.id);
          mkdirSync(absoluteSliceDir, { recursive: true });
          fixesApplied.push(`created ${absoluteSliceDir}`);
        }
        continue;
      }
      const tasksDir = resolveTasksDir(basePath, milestoneId, slice.id);
      if (!tasksDir) {
        // Pending slices haven't been planned yet — tasks/ is created on demand.
        // Skipped slices may legitimately never create tasks/.
        if (slice.pending || slice.skipped) continue;
        issues.push({
          severity: slice.done ? "warning" : "error",
          code: "missing_tasks_dir",
          scope: "slice",
          unitId,
          message: slice.done
            ? `Missing tasks directory for ${unitId} (slice is complete \u2014 cosmetic only)`
            : `Missing tasks directory for ${unitId}`,
          file: relSlicePath(basePath, milestoneId, slice.id),
          fixable: true,
        });
        // Gate on shouldFix (not bare `fix`) so dryRun stays side-effect free.
        if (shouldFix("missing_tasks_dir")) {
          mkdirSync(join(slicePath, "tasks"), { recursive: true });
          fixesApplied.push(`created ${join(slicePath, "tasks")}`);
        }
      }
      const planPath = resolveSliceFile(
        basePath,
        milestoneId,
        slice.id,
        "PLAN",
      );
      const planContent = planPath ? await loadFile(planPath) : null;
      // Normalize plan tasks: prefer DB, fall back to parsers
      let plan = null;
      if (isDbAvailable()) {
        const dbTasks = getSliceTasks(milestoneId, slice.id);
        if (dbTasks.length > 0) {
          plan = {
            tasks: dbTasks.map((t) => ({
              id: t.id,
              done: t.status === "complete" || t.status === "done",
              title: t.title,
              estimate: t.estimate || undefined,
            })),
          };
        }
      }
      if (!plan && planContent) {
        plan = parsePlan(planContent);
      }
      if (!plan) {
        if (!slice.done) {
          issues.push({
            severity: "warning",
            code: "missing_slice_plan",
            scope: "slice",
            unitId,
            message: `Slice ${unitId} has no plan file`,
            file: relSliceFile(basePath, milestoneId, slice.id, "PLAN"),
            fixable: false,
          });
        }
        continue;
      }
      // ── Duplicate task IDs ───────────────────────────────────────────────
      const taskIdCounts = new Map();
      for (const task of plan.tasks)
        taskIdCounts.set(task.id, (taskIdCounts.get(task.id) ?? 0) + 1);
      for (const [taskId, count] of taskIdCounts) {
        if (count > 1) {
          issues.push({
            severity: "error",
            code: "duplicate_task_id",
            scope: "slice",
            unitId,
            message: `Task ID "${taskId}" appears ${count} times in ${slice.id}-PLAN.md — duplicate IDs cause dispatch failures`,
            file: relSliceFile(basePath, milestoneId, slice.id, "PLAN"),
            fixable: false,
          });
        }
      }
      // ── Task files on disk not in plan ────────────────────────────────────
      try {
        if (tasksDir) {
          const planTaskIds = new Set(plan.tasks.map((t) => t.id));
          for (const f of readdirSync(tasksDir)) {
            if (!f.endsWith("-SUMMARY.md")) continue;
            const diskTaskId = f.replace(/-SUMMARY\.md$/, "");
            if (!planTaskIds.has(diskTaskId)) {
              issues.push({
                severity: "info",
                code: "task_file_not_in_plan",
                scope: "slice",
                unitId,
                message: `Task summary "${f}" exists on disk but "${diskTaskId}" is not in ${slice.id}-PLAN.md`,
                file: relTaskFile(
                  basePath,
                  milestoneId,
                  slice.id,
                  diskTaskId,
                  "SUMMARY",
                ),
                fixable: false,
              });
            }
          }
        }
      } catch {
        /* non-fatal */
      }
      // ── Single-task DB/disk ID drift ───────────────────────────────────
      // A killed plan-slice can leave DB state pointing at the intended task ID
      // while the task plan exists under a generated ordinal ID. In that state
      // dispatch conservatively re-runs plan-slice forever because the active
      // task's PLAN file is missing. If there is exactly one DB task and one
      // task PLAN file, copying the orphan plan to the DB task ID is safe and
      // preserves the original file for audit.
      try {
        if (tasksDir && plan.tasks.length === 1) {
          const task = plan.tasks[0];
          const expectedPlanPath = resolveTaskFile(
            basePath,
            milestoneId,
            slice.id,
            task.id,
            "PLAN",
          );
          const hasExpectedPlan = !!(
            expectedPlanPath && existsSync(expectedPlanPath)
          );
          if (!hasExpectedPlan) {
            const planFiles = readdirSync(tasksDir).filter((f) =>
              f.endsWith("-PLAN.md"),
            );
            if (planFiles.length === 1) {
              const sourceFile = planFiles[0];
              const sourceTaskId = sourceFile.replace(/-PLAN\.md$/, "");
              const sourceAbs = join(tasksDir, sourceFile);
              const targetAbs = join(tasksDir, `${task.id}-PLAN.md`);
              issues.push({
                severity: "error",
                code: "task_plan_id_drift",
                scope: "task",
                unitId: `${unitId}/${task.id}`,
                message: `Task ${task.id} is active in DB, but the only task plan on disk is ${sourceFile}. This makes autonomous redispatch plan-slice instead of execute-task.`,
                file: relTaskFile(
                  basePath,
                  milestoneId,
                  slice.id,
                  task.id,
                  "PLAN",
                ),
                fixable: true,
              });
              if (shouldFix("task_plan_id_drift")) {
                copyFileSync(sourceAbs, targetAbs);
                fixesApplied.push(
                  `copied ${sourceTaskId}-PLAN.md to ${task.id}-PLAN.md for ${unitId}`,
                );
              }
            }
          }
        }
      } catch {
        /* non-fatal */
      }
      // Starts true only when the plan has at least one task; ANDed with each
      // task's done flag below.
      let allTasksDone = plan.tasks.length > 0;
      for (const task of plan.tasks) {
        const taskUnitId = `${unitId}/${task.id}`;
        const summaryPath = resolveTaskFile(
          basePath,
          milestoneId,
          slice.id,
          task.id,
          "SUMMARY",
        );
        const hasSummary = !!(summaryPath && (await loadFile(summaryPath)));
        // Must-have verification
        if (task.done && hasSummary) {
          const taskPlanPath = resolveTaskFile(
            basePath,
            milestoneId,
            slice.id,
            task.id,
            "PLAN",
          );
          if (taskPlanPath) {
            const taskPlanContent = await loadFile(taskPlanPath);
            if (taskPlanContent) {
              const mustHaves = parseTaskPlanMustHaves(taskPlanContent);
              if (mustHaves.length > 0) {
                const summaryContent = await loadFile(summaryPath);
                const mentionedCount = summaryContent
                  ? countMustHavesMentionedInSummary(mustHaves, summaryContent)
                  : 0;
                if (mentionedCount < mustHaves.length) {
                  issues.push({
                    severity: "warning",
                    code: "task_done_must_haves_not_verified",
                    scope: "task",
                    unitId: taskUnitId,
                    message: `Task ${task.id} has ${mustHaves.length} must-haves but summary addresses only ${mentionedCount}`,
                    file: relTaskFile(
                      basePath,
                      milestoneId,
                      slice.id,
                      task.id,
                      "SUMMARY",
                    ),
                    fixable: false,
                  });
                }
              }
            }
          }
        }
        // ── Future timestamp check ─────────────────────────────────────
        if (task.done && hasSummary && summaryPath) {
          try {
            const rawSummary = await loadFile(summaryPath);
            const m = rawSummary?.match(/^completed_at:\s*(.+)$/m);
            if (m) {
              const ts = new Date(m[1].trim());
              // Allow up to 24h of clock skew before flagging.
              if (
                !Number.isNaN(ts.getTime()) &&
                ts.getTime() > Date.now() + 24 * 60 * 60 * 1000
              ) {
                issues.push({
                  severity: "warning",
                  code: "future_timestamp",
                  scope: "task",
                  unitId: taskUnitId,
                  message: `Task ${task.id} has completed_at "${m[1].trim()}" which is more than 24h in the future`,
                  file: relTaskFile(
                    basePath,
                    milestoneId,
                    slice.id,
                    task.id,
                    "SUMMARY",
                  ),
                  fixable: false,
                });
              }
            }
          } catch {
            /* non-fatal */
          }
        }
        allTasksDone = allTasksDone && task.done;
      }
      // Blocker-without-replan detection
      // Skip when all tasks are done — the blocker was implicitly resolved
      // within the task and the slice is not stuck (#3105 Bug 2).
      const replanPath = resolveSliceFile(
        basePath,
        milestoneId,
        slice.id,
        "REPLAN",
      );
      if (!replanPath && !allTasksDone) {
        for (const task of plan.tasks) {
          if (!task.done) continue;
          const summaryPath = resolveTaskFile(
            basePath,
            milestoneId,
            slice.id,
            task.id,
            "SUMMARY",
          );
          if (!summaryPath) continue;
          const summaryContent = await loadFile(summaryPath);
          if (!summaryContent) continue;
          const summary = parseSummary(summaryContent);
          if (summary.frontmatter.blocker_discovered) {
            issues.push({
              severity: "warning",
              code: "blocker_discovered_no_replan",
              scope: "slice",
              unitId,
              message: `Task ${task.id} reported blocker_discovered but no REPLAN.md exists for ${slice.id} \u2014 slice may be stuck`,
              file: relSliceFile(basePath, milestoneId, slice.id, "REPLAN"),
              fixable: false,
            });
            break;
          }
        }
      }
      // ── Stale REPLAN: exists but all tasks done ────────────────────────
      if (replanPath && allTasksDone) {
        issues.push({
          severity: "info",
          code: "stale_replan_file",
          scope: "slice",
          unitId,
          message: `${slice.id} has a REPLAN.md but all tasks are done — REPLAN.md may be stale`,
          file: relSliceFile(basePath, milestoneId, slice.id, "REPLAN"),
          fixable: false,
        });
      }
    }
    // Milestone-level check: all slices done but no validation file
    const milestoneComplete =
      roadmap.slices.length > 0 && roadmap.slices.every((s) => s.done);
    if (
      milestoneComplete &&
      !resolveMilestoneFile(basePath, milestoneId, "VALIDATION") &&
      !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")
    ) {
      issues.push({
        severity: "info",
        code: "all_slices_done_missing_milestone_validation",
        scope: "milestone",
        unitId: milestoneId,
        message: `All slices are done but ${milestoneId}-VALIDATION.md is missing \u2014 milestone is in validating-milestone phase`,
        file: relMilestoneFile(basePath, milestoneId, "VALIDATION"),
        fixable: false,
      });
    }
    // Milestone-level check: all slices done but no milestone summary
    if (
      milestoneComplete &&
      !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")
    ) {
      issues.push({
        severity: "warning",
        code: "all_slices_done_missing_milestone_summary",
        scope: "milestone",
        unitId: milestoneId,
        message: `All slices are done but ${milestoneId}-SUMMARY.md is missing \u2014 milestone is stuck in completing-milestone phase`,
        file: relMilestoneFile(basePath, milestoneId, "SUMMARY"),
        fixable: false,
      });
    }
  }
  // Refresh STATE.md only when real (non-dry-run) fixes were applied.
  if (fix && !dryRun && fixesApplied.length > 0) {
    await updateStateFile(basePath, fixesApplied);
  }
  const report = {
    ok: issues.every((issue) => issue.severity !== "error"),
    basePath,
    issues,
    fixesApplied,
    timing: {
      git: gitMs,
      runtime: runtimeMs,
      environment: envMs,
      sfState: Math.max(0, Date.now() - t0state),
    },
  };
  await appendDoctorHistory(basePath, report);
  return report;
}