feat(gsd): add 13 enhancements to /gsd doctor (#1583)

New detections:
- Circular dependency detection (DFS cycle check on slice depends:[])
- Orphaned slice directories (dirs not in roadmap)
- Duplicate task IDs in plan files
- Task summary files on disk not in plan (info)
- Stale REPLAN.md when all tasks are done (info)
- Metrics ledger corruption (version != 1 or units not array)
- Large planning files >100KB (warning)
- Future completed_at timestamps >24h ahead (warning)

New modes and output:
- --dry-run flag: reports "[dry-run] would fix" entries without writing any changes
- --json flag: formatDoctorReportJson() for CI/tooling integration
- --build / --test flags: opt-in slow checkBuildHealth/checkTestHealth
- Per-check timing: timing.{git,runtime,environment,gsdState} on DoctorReport
- Doctor history: appends compact JSONL entry to .gsd/doctor-history.jsonl;
  exports readDoctorHistory() for programmatic access

Tests: 27 new test scenarios in doctor-enhancements.test.ts covering all features
This commit is contained in:
Jeremy McSpadden 2026-03-20 09:12:09 -05:00 committed by GitHub
parent b580f64144
commit e35ad9d194
8 changed files with 657 additions and 17 deletions

View file

@ -15,6 +15,7 @@ import { appendOverride, appendKnowledge } from "./files.js";
import {
formatDoctorIssuesForPrompt,
formatDoctorReport,
formatDoctorReportJson,
runGSDDoctor,
selectDoctorScope,
filterDoctorIssues,
@ -43,16 +44,30 @@ export function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined,
export async function handleDoctor(args: string, ctx: ExtensionCommandContext, pi: ExtensionAPI): Promise<void> {
const trimmed = args.trim();
const parts = trimmed ? trimmed.split(/\s+/) : [];
// Extract flags before positional parsing
const jsonMode = trimmed.includes("--json");
const dryRun = trimmed.includes("--dry-run");
const includeBuild = trimmed.includes("--build");
const includeTests = trimmed.includes("--test");
const stripped = trimmed.replace(/--json|--dry-run|--build|--test/g, "").trim();
const parts = stripped ? stripped.split(/\s+/) : [];
const mode = parts[0] === "fix" || parts[0] === "heal" || parts[0] === "audit" ? parts[0] : "doctor";
const requestedScope = mode === "doctor" ? parts[0] : parts[1];
const scope = await selectDoctorScope(projectRoot(), requestedScope);
const effectiveScope = mode === "audit" ? requestedScope : scope;
const report = await runGSDDoctor(projectRoot(), {
fix: mode === "fix" || mode === "heal",
fix: mode === "fix" || mode === "heal" || dryRun,
dryRun,
scope: effectiveScope,
includeBuild,
includeTests,
});
if (jsonMode) {
ctx.ui.notify(formatDoctorReportJson(report), "info");
return;
}
const reportText = formatDoctorReport(report, {
scope: effectiveScope,
includeWarnings: mode === "audit",

View file

@ -513,6 +513,10 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
{ cmd: "fix", desc: "Auto-fix detected issues" },
{ cmd: "heal", desc: "AI-driven deep healing" },
{ cmd: "audit", desc: "Run health audit without fixing" },
{ cmd: "--dry-run", desc: "Show what --fix would change without applying" },
{ cmd: "--json", desc: "Output report as JSON (CI/tooling friendly)" },
{ cmd: "--build", desc: "Include slow build health check (npm run build)" },
{ cmd: "--test", desc: "Include slow test health check (npm test)" },
];
if (parts.length <= 2) {

View file

@ -657,6 +657,81 @@ export async function checkRuntimeHealth(
} catch {
// Non-fatal — external state check failed
}
// ── Metrics ledger integrity ───────────────────────────────────────────
try {
const metricsPath = join(root, "metrics.json");
if (existsSync(metricsPath)) {
try {
const raw = readFileSync(metricsPath, "utf-8");
const ledger = JSON.parse(raw);
if (ledger.version !== 1 || !Array.isArray(ledger.units)) {
issues.push({
severity: "warning",
code: "metrics_ledger_corrupt",
scope: "project",
unitId: "project",
message: "metrics.json has an unexpected structure (version !== 1 or units is not an array) — metrics data may be unreliable",
file: ".gsd/metrics.json",
fixable: false,
});
}
} catch {
issues.push({
severity: "warning",
code: "metrics_ledger_corrupt",
scope: "project",
unitId: "project",
message: "metrics.json is not valid JSON — metrics data may be corrupt",
file: ".gsd/metrics.json",
fixable: false,
});
}
}
} catch {
// Non-fatal — metrics check failed
}
// ── Large planning file detection ──────────────────────────────────────
// Files over 100KB can cause LLM context pressure. Report the worst offenders.
try {
const MAX_FILE_BYTES = 100 * 1024; // 100KB
const milestonesPath = milestonesDir(basePath);
if (existsSync(milestonesPath)) {
const largeFiles: Array<{ path: string; sizeKB: number }> = [];
function scanForLargeFiles(dir: string, depth = 0): void {
if (depth > 6) return;
try {
for (const entry of readdirSync(dir)) {
const full = join(dir, entry);
try {
const s = statSync(full);
if (s.isDirectory()) { scanForLargeFiles(full, depth + 1); continue; }
if (entry.endsWith(".md") && s.size > MAX_FILE_BYTES) {
largeFiles.push({ path: full.replace(basePath + "/", ""), sizeKB: Math.round(s.size / 1024) });
}
} catch { /* skip entry */ }
}
} catch { /* skip dir */ }
}
scanForLargeFiles(milestonesPath);
if (largeFiles.length > 0) {
largeFiles.sort((a, b) => b.sizeKB - a.sizeKB);
const worst = largeFiles[0]!;
issues.push({
severity: "warning",
code: "large_planning_file",
scope: "project",
unitId: "project",
message: `${largeFiles.length} planning file(s) exceed 100KB — largest: ${worst.path} (${worst.sizeKB}KB). Large files cause LLM context pressure.`,
file: worst.path,
fixable: false,
});
}
}
} catch {
// Non-fatal — large file scan failed
}
}
/**

View file

@ -407,6 +407,63 @@ function checkGitRemote(basePath: string): EnvironmentCheckResult | null {
return { name: "git_remote", status: "ok", message: "Git remote reachable" };
}
/**
 * Opt-in slow check (--build flag): verify that the project's build passes.
 *
 * Looks for a "build" script in package.json and runs `npm run build`.
 * Returns null when the check does not apply (no package.json, no build
 * script, or an unreadable/invalid manifest); otherwise an error result on
 * build failure or an ok result on success.
 */
function checkBuildHealth(basePath: string): EnvironmentCheckResult | null {
  const manifestPath = join(basePath, "package.json");
  if (!existsSync(manifestPath)) return null;
  try {
    const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
    if (!manifest.scripts?.build) return null;
    // tryExec returns null on a non-zero exit status.
    const output = tryExec("npm run build 2>&1", basePath);
    if (output !== null) {
      return { name: "build", status: "ok", message: "Build passes" };
    }
    return {
      name: "build",
      status: "error",
      message: "Build failed — npm run build exited non-zero",
      detail: "Fix build errors before dispatching work",
    };
  } catch {
    // Unreadable or malformed package.json — treat the check as not applicable.
    return null;
  }
}
/**
 * Opt-in slow check (--test flag): verify that the project's tests pass.
 *
 * Looks for a "test" script in package.json and runs `npm test`. Returns
 * null when the check does not apply (no package.json, no real test script,
 * or an unreadable/invalid manifest); otherwise a warning result on test
 * failure or an ok result on success.
 */
function checkTestHealth(basePath: string): EnvironmentCheckResult | null {
  const manifestPath = join(basePath, "package.json");
  if (!existsSync(manifestPath)) return null;
  try {
    const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
    const testScript = manifest.scripts?.test;
    // `npm init` generates a placeholder test script; treat it as "no tests".
    if (!testScript || testScript.includes("no test specified")) return null;
    // tryExec returns null on a non-zero exit status.
    const output = tryExec("npm test 2>&1", basePath);
    if (output !== null) {
      return { name: "test", status: "ok", message: "Tests pass" };
    }
    return {
      name: "test",
      status: "warning",
      message: "Tests failing — npm test exited non-zero",
      detail: "Fix failing tests before shipping",
    };
  } catch {
    // Unreadable or malformed package.json — treat the check as not applicable.
    return null;
  }
}
// ── Public API ─────────────────────────────────────────────────────────────
/**
@ -454,6 +511,26 @@ export function runFullEnvironmentChecks(basePath: string): EnvironmentCheckResu
return results;
}
/**
 * Run the slow, opt-in environment checks (build and/or test).
 *
 * These are never run on the pre-dispatch gate — only on an explicit
 * `/gsd doctor --build` / `--test` invocation.
 */
export function runSlowEnvironmentChecks(
  basePath: string,
  options?: { includeBuild?: boolean; includeTests?: boolean },
): EnvironmentCheckResult[] {
  // Collect in build-then-test order; each check returns null when it
  // does not apply, and null results are dropped from the report.
  const checks: Array<EnvironmentCheckResult | null> = [];
  if (options?.includeBuild) checks.push(checkBuildHealth(basePath));
  if (options?.includeTests) checks.push(checkTestHealth(basePath));
  return checks.filter((check): check is EnvironmentCheckResult => check !== null);
}
/**
* Convert environment check results to DoctorIssue format for the doctor pipeline.
*/
@ -477,12 +554,16 @@ export function environmentResultsToDoctorIssues(results: EnvironmentCheckResult
export async function checkEnvironmentHealth(
basePath: string,
issues: DoctorIssue[],
options?: { includeRemote?: boolean },
options?: { includeRemote?: boolean; includeBuild?: boolean; includeTests?: boolean },
): Promise<void> {
const results = options?.includeRemote
? runFullEnvironmentChecks(basePath)
: runEnvironmentChecks(basePath);
if (options?.includeBuild || options?.includeTests) {
results.push(...runSlowEnvironmentChecks(basePath, options));
}
issues.push(...environmentResultsToDoctorIssues(results));
}

View file

@ -76,3 +76,23 @@ export function formatDoctorIssuesForPrompt(issues: DoctorIssue[]): string {
return `- [${prefix}] ${issue.unitId} | ${issue.code} | ${issue.message}${issue.file ? ` | file: ${issue.file}` : ""} | fixable: ${issue.fixable ? "yes" : "no"}`;
}).join("\n");
}
/**
 * Serialize a doctor report to pretty-printed JSON for CI/tooling integration.
 * Includes a fresh generatedAt timestamp and a severity summary; the timing
 * breakdown is included only when present on the report.
 * Usage: /gsd doctor --json
 */
export function formatDoctorReportJson(report: DoctorReport): string {
  const payload: Record<string, unknown> = {
    ok: report.ok,
    basePath: report.basePath,
    generatedAt: new Date().toISOString(),
    summary: summarizeDoctorIssues(report.issues),
    issues: report.issues,
    fixesApplied: report.fixesApplied,
  };
  if (report.timing) payload.timing = report.timing;
  return JSON.stringify(payload, null, 2);
}

View file

@ -53,7 +53,20 @@ export type DoctorIssueCode =
| "stranded_lock_directory"
// Git / worktree integrity checks
| "integration_branch_missing"
| "worktree_directory_orphaned";
| "worktree_directory_orphaned"
// GSD state structural checks
| "circular_slice_dependency"
| "orphaned_slice_directory"
| "duplicate_task_id"
| "task_file_not_in_plan"
| "stale_replan_file"
| "future_timestamp"
// Runtime data integrity
| "metrics_ledger_corrupt"
| "large_planning_file"
// Slow environment checks (opt-in via --build / --test flags)
| "env_build"
| "env_test";
/**
* Issue codes that represent expected completion-transition states.
@ -83,6 +96,8 @@ export interface DoctorReport {
basePath: string;
issues: DoctorIssue[];
fixesApplied: string[];
/** Per-domain check durations in milliseconds. Present on explicit /gsd doctor runs. */
timing?: { git: number; runtime: number; environment: number; gsdState: number };
}
export interface DoctorSummary {

View file

@ -1,14 +1,15 @@
import { existsSync, mkdirSync } from "node:fs";
import { existsSync, mkdirSync, lstatSync, readdirSync, readFileSync } from "node:fs";
import { join } from "node:path";
import { loadFile, parsePlan, parseRoadmap, parseSummary, saveFile, parseTaskPlanMustHaves, countMustHavesMentionedInSummary } from "./files.js";
import { resolveMilestoneFile, resolveMilestonePath, resolveSliceFile, resolveSlicePath, resolveTaskFile, resolveTasksDir, milestonesDir, gsdRoot, relMilestoneFile, relSliceFile, relTaskFile, relSlicePath, relGsdRootFile, resolveGsdRootFile } from "./paths.js";
import { resolveMilestoneFile, resolveMilestonePath, resolveSliceFile, resolveSlicePath, resolveTaskFile, resolveTasksDir, milestonesDir, gsdRoot, relMilestoneFile, relSliceFile, relTaskFile, relSlicePath, relGsdRootFile, resolveGsdRootFile, relMilestonePath } from "./paths.js";
import { deriveState, isMilestoneComplete } from "./state.js";
import { invalidateAllCaches } from "./cache.js";
import { loadEffectiveGSDPreferences, type GSDPreferences } from "./preferences.js";
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import type { DoctorIssue, DoctorIssueCode, DoctorReport } from "./doctor-types.js";
import { COMPLETION_TRANSITION_CODES } from "./doctor-types.js";
import type { RoadmapSliceEntry } from "./types.js";
import { checkGitHealth, checkRuntimeHealth } from "./doctor-checks.js";
import { checkEnvironmentHealth } from "./doctor-environment.js";
import { runProviderChecks } from "./doctor-providers.js";
@ -17,7 +18,7 @@ import { runProviderChecks } from "./doctor-providers.js";
// All public types and functions from extracted modules are re-exported here
// so that existing imports from "./doctor.js" continue to work unchanged.
export type { DoctorSeverity, DoctorIssueCode, DoctorIssue, DoctorReport, DoctorSummary } from "./doctor-types.js";
export { summarizeDoctorIssues, filterDoctorIssues, formatDoctorReport, formatDoctorIssuesForPrompt } from "./doctor-format.js";
export { summarizeDoctorIssues, filterDoctorIssues, formatDoctorReport, formatDoctorIssuesForPrompt, formatDoctorReportJson } from "./doctor-format.js";
export { runEnvironmentChecks, runFullEnvironmentChecks, formatEnvironmentReport, type EnvironmentCheckResult } from "./doctor-environment.js";
export { computeProgressScore, computeProgressScoreWithContext, formatProgressLine, formatProgressReport, type ProgressScore, type ProgressLevel } from "./progress-score.js";
@ -350,10 +351,60 @@ export async function selectDoctorScope(basePath: string, requestedScope?: strin
return state.registry[0]?.id;
}
export async function runGSDDoctor(basePath: string, options?: { fix?: boolean; scope?: string; fixLevel?: "task" | "all"; isolationMode?: "none" | "worktree" | "branch" }): Promise<import("./doctor-types.js").DoctorReport> {
// ── Helper: circular dependency detection ──────────────────────────────────
/**
 * Detect dependency cycles among roadmap slices via depth-first search.
 * Each returned cycle lists the slice ids along the loop, with the entry id
 * repeated at the end (e.g. ["S01", "S02", "S01"]). Dependencies on ids not
 * present in the roadmap are ignored.
 */
function detectCircularDependencies(slices: RoadmapSliceEntry[]): string[][] {
  const validIds = new Set(slices.map(slice => slice.id));
  // Adjacency list restricted to known slice ids.
  const edges = new Map<string, string[]>();
  for (const slice of slices) {
    edges.set(slice.id, slice.depends.filter(dep => validIds.has(dep)));
  }
  // Classic three-color DFS: unvisited / visiting (on stack) / done.
  const visitState = new Map<string, "unvisited" | "visiting" | "done">();
  for (const slice of slices) visitState.set(slice.id, "unvisited");
  const found: string[][] = [];
  const walk = (node: string, trail: string[]): void => {
    const current = visitState.get(node);
    if (current === "done") return;
    if (current === "visiting") {
      // Back edge: the cycle is the trail suffix starting at `node`.
      found.push([...trail.slice(trail.indexOf(node)), node]);
      return;
    }
    visitState.set(node, "visiting");
    for (const next of edges.get(node) ?? []) walk(next, [...trail, node]);
    visitState.set(node, "done");
  };
  for (const slice of slices) {
    if (visitState.get(slice.id) === "unvisited") walk(slice.id, []);
  }
  return found;
}
// ── Helper: doctor run history ──────────────────────────────────────────────
/** Compact shape of one JSONL line in .gsd/doctor-history.jsonl. */
interface DoctorHistoryEntry { ts: string; ok: boolean; errors: number; warnings: number; fixes: number; codes: string[] }
/**
 * Append a compact summary of a doctor run to .gsd/doctor-history.jsonl.
 * Best-effort: any failure (unreadable history, write error) is swallowed so
 * history logging can never break a doctor run.
 */
async function appendDoctorHistory(basePath: string, report: DoctorReport): Promise<void> {
  try {
    const historyPath = join(gsdRoot(basePath), "doctor-history.jsonl");
    const errorCount = report.issues.filter(issue => issue.severity === "error").length;
    const warningCount = report.issues.filter(issue => issue.severity === "warning").length;
    const line = JSON.stringify({
      ts: new Date().toISOString(),
      ok: report.ok,
      errors: errorCount,
      warnings: warningCount,
      fixes: report.fixesApplied.length,
      // Deduplicated list of issue codes seen in this run.
      codes: [...new Set(report.issues.map(issue => issue.code))],
    } satisfies DoctorHistoryEntry);
    const previous = existsSync(historyPath) ? readFileSync(historyPath, "utf-8") : "";
    await saveFile(historyPath, previous + line + "\n");
  } catch { /* non-fatal */ }
}
/**
 * Read the last N doctor history entries from .gsd/doctor-history.jsonl.
 * Returns most-recent-first. Malformed lines are skipped individually rather
 * than discarding the whole history; a missing file or read error yields [].
 */
export async function readDoctorHistory(basePath: string, lastN = 50): Promise<DoctorHistoryEntry[]> {
  try {
    const historyPath = join(gsdRoot(basePath), "doctor-history.jsonl");
    if (!existsSync(historyPath)) return [];
    const lines = readFileSync(historyPath, "utf-8").split("\n").filter(l => l.trim());
    const entries: DoctorHistoryEntry[] = [];
    for (const line of lines.slice(-lastN).reverse()) {
      try {
        entries.push(JSON.parse(line) as DoctorHistoryEntry);
      } catch { /* skip corrupt line — keep the rest of the history readable */ }
    }
    return entries;
  } catch { return []; }
}
export async function runGSDDoctor(basePath: string, options?: { fix?: boolean; dryRun?: boolean; scope?: string; fixLevel?: "task" | "all"; isolationMode?: "none" | "worktree" | "branch"; includeBuild?: boolean; includeTests?: boolean }): Promise<DoctorReport> {
const issues: DoctorIssue[] = [];
const fixesApplied: string[] = [];
const fix = options?.fix === true;
const dryRun = options?.dryRun === true;
const fixLevel = options?.fixLevel ?? "all";
// Issue codes that represent completion state transitions — creating summary
@ -364,11 +415,18 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
/** Whether a given issue code should be auto-fixed at the current fixLevel. */
const shouldFix = (code: DoctorIssueCode): boolean => {
if (!fix) return false;
if (!fix || dryRun) return false;
if (fixLevel === "task" && COMPLETION_TRANSITION_CODES.has(code)) return false;
return true;
};
/** Log a dry-run "would fix" entry when fix=true but dryRun=true. */
const dryRunCanFix = (code: DoctorIssueCode, message: string): void => {
if (dryRun && fix && !(fixLevel === "task" && COMPLETION_TRANSITION_CODES.has(code))) {
fixesApplied.push(`[dry-run] would fix: ${message}`);
}
};
const prefs = loadEffectiveGSDPreferences();
if (prefs) {
const prefIssues = validatePreferenceShape(prefs.preferences);
@ -385,21 +443,33 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
}
}
// Git health checks (orphaned worktrees, stale branches, corrupt merge state, tracked runtime files)
// Git health checks — timed
const t0git = Date.now();
const isolationMode: "none" | "worktree" | "branch" = options?.isolationMode ??
(prefs?.preferences?.git?.isolation === "none" ? "none" :
prefs?.preferences?.git?.isolation === "branch" ? "branch" : "worktree");
await checkGitHealth(basePath, issues, fixesApplied, shouldFix, isolationMode);
const gitMs = Date.now() - t0git;
// Runtime health checks (crash locks, completed-units, hook state, activity logs, STATE.md, gitignore)
// Runtime health checks — timed
const t0runtime = Date.now();
await checkRuntimeHealth(basePath, issues, fixesApplied, shouldFix);
const runtimeMs = Date.now() - t0runtime;
// Environment health checks (#1221: missing tools, port conflicts, stale deps, disk space)
await checkEnvironmentHealth(basePath, issues, { includeRemote: !options?.scope });
// Environment health checks — timed
const t0env = Date.now();
await checkEnvironmentHealth(basePath, issues, {
includeRemote: !options?.scope,
includeBuild: options?.includeBuild,
includeTests: options?.includeTests,
});
const envMs = Date.now() - t0env;
const milestonesPath = milestonesDir(basePath);
if (!existsSync(milestonesPath)) {
return { ok: issues.every(issue => issue.severity !== "error"), basePath, issues, fixesApplied };
const report: DoctorReport = { ok: issues.every(i => i.severity !== "error"), basePath, issues, fixesApplied, timing: { git: gitMs, runtime: runtimeMs, environment: envMs, gsdState: 0 } };
await appendDoctorHistory(basePath, report);
return report;
}
const requirementsPath = resolveGsdRootFile(basePath, "REQUIREMENTS");
@ -465,6 +535,43 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
if (!roadmapContent) continue;
const roadmap = parseRoadmap(roadmapContent);
// ── Circular dependency detection ──────────────────────────────────────
for (const cycle of detectCircularDependencies(roadmap.slices)) {
issues.push({
severity: "error",
code: "circular_slice_dependency",
scope: "milestone",
unitId: milestoneId,
message: `Circular dependency detected: ${cycle.join(" → ")}`,
file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
fixable: false,
});
}
// ── Orphaned slice directories ─────────────────────────────────────────
try {
const slicesDir = join(milestonePath, "slices");
if (existsSync(slicesDir)) {
const knownSliceIds = new Set(roadmap.slices.map(s => s.id));
for (const entry of readdirSync(slicesDir)) {
try {
if (!lstatSync(join(slicesDir, entry)).isDirectory()) continue;
} catch { continue; }
if (!knownSliceIds.has(entry)) {
issues.push({
severity: "warning",
code: "orphaned_slice_directory",
scope: "milestone",
unitId: milestoneId,
message: `Directory "${entry}" exists in ${milestoneId}/slices/ but is not referenced in the roadmap`,
file: `${relMilestonePath(basePath, milestoneId)}/slices/${entry}`,
fixable: false,
});
}
}
}
} catch { /* non-fatal */ }
for (const slice of roadmap.slices) {
const unitId = `${milestoneId}/${slice.id}`;
if (options?.scope && !matchesScope(unitId, options.scope) && options.scope !== milestoneId) continue;
@ -539,6 +646,33 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
continue;
}
// ── Duplicate task IDs ───────────────────────────────────────────────
const taskIdCounts = new Map<string, number>();
for (const task of plan.tasks) taskIdCounts.set(task.id, (taskIdCounts.get(task.id) ?? 0) + 1);
for (const [taskId, count] of taskIdCounts) {
if (count > 1) {
issues.push({ severity: "error", code: "duplicate_task_id", scope: "slice", unitId,
message: `Task ID "${taskId}" appears ${count} times in ${slice.id}-PLAN.md — duplicate IDs cause dispatch failures`,
file: relSliceFile(basePath, milestoneId, slice.id, "PLAN"), fixable: false });
}
}
// ── Task files on disk not in plan ────────────────────────────────────
try {
if (tasksDir) {
const planTaskIds = new Set(plan.tasks.map(t => t.id));
for (const f of readdirSync(tasksDir)) {
if (!f.endsWith("-SUMMARY.md")) continue;
const diskTaskId = f.replace(/-SUMMARY\.md$/, "");
if (!planTaskIds.has(diskTaskId)) {
issues.push({ severity: "info", code: "task_file_not_in_plan", scope: "slice", unitId,
message: `Task summary "${f}" exists on disk but "${diskTaskId}" is not in ${slice.id}-PLAN.md`,
file: relTaskFile(basePath, milestoneId, slice.id, diskTaskId, "SUMMARY"), fixable: false });
}
}
}
} catch { /* non-fatal */ }
let allTasksDone = plan.tasks.length > 0;
for (const task of plan.tasks) {
const taskUnitId = `${unitId}/${task.id}`;
@ -555,6 +689,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
file: relTaskFile(basePath, milestoneId, slice.id, task.id, "SUMMARY"),
fixable: true,
});
dryRunCanFix("task_done_missing_summary", `create stub summary for ${taskUnitId}`);
if (shouldFix("task_done_missing_summary")) {
const stubPath = join(
basePath, ".gsd", "milestones", milestoneId, "slices", slice.id, "tasks",
@ -618,6 +753,22 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
}
}
// ── Future timestamp check ─────────────────────────────────────
if (task.done && hasSummary && summaryPath) {
try {
const rawSummary = await loadFile(summaryPath);
const m = rawSummary?.match(/^completed_at:\s*(.+)$/m);
if (m) {
const ts = new Date(m[1].trim());
if (!isNaN(ts.getTime()) && ts.getTime() > Date.now() + 24 * 60 * 60 * 1000) {
issues.push({ severity: "warning", code: "future_timestamp", scope: "task", unitId: taskUnitId,
message: `Task ${task.id} has completed_at "${m[1].trim()}" which is more than 24h in the future`,
file: relTaskFile(basePath, milestoneId, slice.id, task.id, "SUMMARY"), fixable: false });
}
}
} catch { /* non-fatal */ }
}
allTasksDone = allTasksDone && task.done;
}
@ -646,6 +797,13 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
}
}
// ── Stale REPLAN: exists but all tasks done ────────────────────────
if (replanPath && allTasksDone) {
issues.push({ severity: "info", code: "stale_replan_file", scope: "slice", unitId,
message: `${slice.id} has a REPLAN.md but all tasks are done — REPLAN.md may be stale`,
file: relSliceFile(basePath, milestoneId, slice.id, "REPLAN"), fixable: false });
}
const sliceSummaryPath = resolveSliceFile(basePath, milestoneId, slice.id, "SUMMARY");
const sliceUatPath = join(slicePath, `${slice.id}-UAT.md`);
const hasSliceSummary = !!(sliceSummaryPath && await loadFile(sliceSummaryPath));
@ -661,6 +819,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
file: relSliceFile(basePath, milestoneId, slice.id, "SUMMARY"),
fixable: true,
});
dryRunCanFix("all_tasks_done_missing_slice_summary", `create placeholder summary for ${unitId}`);
if (shouldFix("all_tasks_done_missing_slice_summary")) await ensureSliceSummaryStub(basePath, milestoneId, slice.id, fixesApplied);
}
@ -674,6 +833,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
file: `${relSlicePath(basePath, milestoneId, slice.id)}/${slice.id}-UAT.md`,
fixable: true,
});
dryRunCanFix("all_tasks_done_missing_slice_uat", `create placeholder UAT for ${unitId}`);
if (shouldFix("all_tasks_done_missing_slice_uat")) await ensureSliceUatStub(basePath, milestoneId, slice.id, fixesApplied);
}
@ -687,6 +847,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
file: relMilestoneFile(basePath, milestoneId, "ROADMAP"),
fixable: true,
});
dryRunCanFix("all_tasks_done_roadmap_not_checked", `mark ${slice.id} done in roadmap`);
if (shouldFix("all_tasks_done_roadmap_not_checked") && (hasSliceSummary || issues.some(issue => issue.code === "all_tasks_done_missing_slice_summary" && issue.unitId === unitId))) {
await markSliceDoneInRoadmap(basePath, milestoneId, slice.id, fixesApplied);
}
@ -744,14 +905,17 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
}
}
if (fix && fixesApplied.length > 0) {
if (fix && !dryRun && fixesApplied.length > 0) {
await updateStateFile(basePath, fixesApplied);
}
return {
const report: DoctorReport = {
ok: issues.every(issue => issue.severity !== "error"),
basePath,
issues,
fixesApplied,
timing: { git: gitMs, runtime: runtimeMs, environment: envMs, gsdState: Math.max(0, Date.now() - t0env - envMs) },
};
await appendDoctorHistory(basePath, report);
return report;
}

View file

@ -0,0 +1,266 @@
import { mkdtempSync, mkdirSync, rmSync, writeFileSync, existsSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { runGSDDoctor } from "../doctor.js";
import { formatDoctorReportJson } from "../doctor-format.js";
import { createTestContext } from "./test-helpers.ts";
const { assertEq, assertTrue, assertMatch, report } = createTestContext();
// ── Helpers ─────────────────────────────────────────────────────────────────
/** Create a fresh temp workspace with the .gsd/milestones/M001/slices skeleton. */
function makeBase(): { base: string; gsd: string; mDir: string } {
  const base = mkdtempSync(join(tmpdir(), "gsd-doctor-enh-"));
  const layout = {
    base,
    gsd: join(base, ".gsd"),
    mDir: join(base, ".gsd", "milestones", "M001"),
  };
  mkdirSync(join(layout.mDir, "slices"), { recursive: true });
  return layout;
}
/** Write the milestone roadmap file (M001-ROADMAP.md) into the milestone dir. */
function writeRoadmap(mDir: string, content: string): void {
  const roadmapPath = join(mDir, "M001-ROADMAP.md");
  writeFileSync(roadmapPath, content);
}
/** Create slices/<sliceId>/ with a tasks/ subdir and a plan file; returns the slice dir. */
function writeSlice(mDir: string, sliceId: string, planContent: string): string {
  const sliceDir = join(mDir, "slices", sliceId);
  mkdirSync(join(sliceDir, "tasks"), { recursive: true });
  const planPath = join(sliceDir, `${sliceId}-PLAN.md`);
  writeFileSync(planPath, planContent);
  return sliceDir;
}
async function main(): Promise<void> {
// ── 1. Circular dependency detection ──────────────────────────────────────
console.log("\n=== circular dependency detection ===");
{
const { base, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Circular Test\n\n## Slices\n- [ ] **S01: Slice A** \`risk:low\` \`depends:[S02]\`\n > After this: done\n- [ ] **S02: Slice B** \`risk:low\` \`depends:[S01]\`\n > After this: done\n`);
writeSlice(mDir, "S01", "# S01: Slice A\n\n**Goal:** A\n**Demo:** A\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
writeSlice(mDir, "S02", "# S02: Slice B\n\n**Goal:** B\n**Demo:** B\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "circular_slice_dependency"),
"detects circular dependency S01 → S02 → S01",
);
rmSync(base, { recursive: true, force: true });
}
// ── 2. Duplicate task IDs ──────────────────────────────────────────────────
console.log("\n=== duplicate task IDs ===");
{
const { base, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Dup Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: First** `est:10m`\n Task one.\n- [ ] **T01: Duplicate** `est:10m`\n Task dup.\n");
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "duplicate_task_id"),
"detects duplicate task ID T01",
);
rmSync(base, { recursive: true, force: true });
}
// ── 3. Orphaned slice directory ──────────────────────────────────────────
console.log("\n=== orphaned slice directory ===");
{
const { base, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Orphan Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
// Create an extra slice directory not in roadmap
mkdirSync(join(mDir, "slices", "S99"), { recursive: true });
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "orphaned_slice_directory" && i.message.includes("S99")),
"detects orphaned slice directory S99",
);
rmSync(base, { recursive: true, force: true });
}
// ── 4. Task file not in plan ───────────────────────────────────────────────
console.log("\n=== task file not in plan ===");
{
const { base, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Extra Task Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
const sDir = writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [x] **T01: Task** `est:10m`\n Done.\n");
// T01 summary (matches plan)
writeFileSync(join(sDir, "tasks", "T01-SUMMARY.md"), "---\nstatus: done\n---\n# T01\nDone.\n");
// T99 summary (NOT in plan)
writeFileSync(join(sDir, "tasks", "T99-SUMMARY.md"), "---\nstatus: done\n---\n# T99\nExtra.\n");
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "task_file_not_in_plan" && i.message.includes("T99")),
"detects task summary T99 not in plan",
);
rmSync(base, { recursive: true, force: true });
}
// ── 5. Stale REPLAN file ────────────────────────────────────────────────────
console.log("\n=== stale REPLAN detection ===");
{
const { base, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Replan Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
const sDir = writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [x] **T01: Task** `est:10m`\n Done.\n");
writeFileSync(join(sDir, "tasks", "T01-SUMMARY.md"), "---\nstatus: done\ncompleted_at: 2026-01-01T00:00:00Z\n---\n# T01\nDone.\n");
// Add a REPLAN file even though all tasks are done
writeFileSync(join(sDir, "S01-REPLAN.md"), "# S01 REPLAN\nSomething changed.\n");
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "stale_replan_file"),
"detects stale REPLAN when all tasks are done",
);
rmSync(base, { recursive: true, force: true });
}
// ── 6. Metrics ledger corrupt ───────────────────────────────────────────────
console.log("\n=== metrics ledger corrupt ===");
{
const { base, gsd, mDir } = makeBase();
writeRoadmap(mDir, `# M001: Metrics Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
// Write invalid metrics.json
writeFileSync(join(gsd, "metrics.json"), '{"version":2,"data":[]}');
const result = await runGSDDoctor(base, { fix: false });
assertTrue(
result.issues.some(i => i.code === "metrics_ledger_corrupt"),
"detects corrupt metrics ledger (version != 1)",
);
rmSync(base, { recursive: true, force: true });
}
// ── 7. Large planning file ──────────────────────────────────────────────────
console.log("\n=== large planning file ===");
{
  // A planning .md just over the 100KB threshold must trigger a warning.
  const { base, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: Large File Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  const sliceDir = writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
  // 101KB of filler pushes the file past the limit.
  const oversized = `# Big File\n${"x".repeat(101 * 1024)}`;
  writeFileSync(join(sliceDir, "BIGFILE.md"), oversized);
  const outcome = await runGSDDoctor(base, { fix: false });
  const flagged = outcome.issues.some(issue => issue.code === "large_planning_file");
  assertTrue(flagged, "detects large planning file over 100KB");
  rmSync(base, { recursive: true, force: true });
}
// ── 8. Future timestamp ─────────────────────────────────────────────────────
console.log("\n=== future timestamp ===");
{
  // A completed_at more than 24h ahead of now must be flagged; use +2 days.
  const { base, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: Timestamp Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  const sliceDir = writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [x] **T01: Task** `est:10m`\n Done.\n");
  const twoDaysAhead = new Date(Date.now() + 2 * 24 * 60 * 60 * 1000).toISOString();
  const summaryBody = `---\nstatus: done\ncompleted_at: ${twoDaysAhead}\n---\n# T01\nDone.\n`;
  writeFileSync(join(sliceDir, "tasks", "T01-SUMMARY.md"), summaryBody);
  const outcome = await runGSDDoctor(base, { fix: false });
  assertTrue(
    outcome.issues.some(issue => issue.code === "future_timestamp"),
    "detects future completed_at timestamp",
  );
  rmSync(base, { recursive: true, force: true });
}
// ── 9. JSON output format ───────────────────────────────────────────────────
console.log("\n=== JSON output format ===");
{
  // formatDoctorReportJson must emit machine-readable JSON with a stable
  // top-level shape: ok, issues[], fixesApplied[], generatedAt, summary{}.
  const { base, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: JSON Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
  const result = await runGSDDoctor(base, { fix: false });
  const json = formatDoctorReportJson(result);
  // Validate once that the payload is a top-level JSON object, then index it
  // directly — avoids repeating `as Record<string, unknown>` on every assert.
  let parsed: Record<string, unknown> | null = null;
  try {
    const raw: unknown = JSON.parse(json);
    if (typeof raw === "object" && raw !== null && !Array.isArray(raw)) {
      parsed = raw as Record<string, unknown>;
    }
  } catch {
    parsed = null;
  }
  assertTrue(parsed !== null, "formatDoctorReportJson produces valid JSON");
  assertTrue(typeof parsed?.ok === "boolean", "JSON has ok field");
  assertTrue(Array.isArray(parsed?.issues), "JSON has issues array");
  assertTrue(Array.isArray(parsed?.fixesApplied), "JSON has fixesApplied array");
  assertTrue(typeof parsed?.generatedAt === "string", "JSON has generatedAt field");
  // typeof null === "object", so exclude null explicitly to avoid a false pass.
  assertTrue(typeof parsed?.summary === "object" && parsed?.summary !== null, "JSON has summary object");
  rmSync(base, { recursive: true, force: true });
}
// ── 10. Dry-run mode ────────────────────────────────────────────────────────
console.log("\n=== dry-run mode ===");
{
  // With fix + dryRun, doctor must report would-fix entries but write nothing.
  const { base, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: Dry Run Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  const sliceDir = writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [x] **T01: Task** `est:10m`\n Done.\n");
  const outcome = await runGSDDoctor(base, { fix: true, dryRun: true });
  // No slice summary may appear on disk in dry-run mode.
  const summaryWritten = existsSync(join(sliceDir, "S01-SUMMARY.md"));
  assertTrue(!summaryWritten, "dry-run does not create slice summary");
  const reportedDryRun = outcome.fixesApplied.some(fix => fix.startsWith("[dry-run]"));
  assertTrue(reportedDryRun, "dry-run mode reports would-fix entries");
  rmSync(base, { recursive: true, force: true });
}
// ── 11. Per-check timing ─────────────────────────────────────────────────────
console.log("\n=== per-check timing ===");
{
  // The report must carry a numeric duration for every check category.
  const { base, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: Timing Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
  const outcome = await runGSDDoctor(base, { fix: false });
  assertTrue(outcome.timing !== undefined, "report includes timing");
  for (const key of ["git", "runtime", "environment", "gsdState"] as const) {
    assertTrue(typeof outcome.timing?.[key] === "number", `timing.${key} is a number`);
  }
  rmSync(base, { recursive: true, force: true });
}
// ── 12. Doctor history ───────────────────────────────────────────────────────
console.log("\n=== doctor history ===");
{
  // A doctor run must append a JSONL entry readable via readDoctorHistory().
  const { base, gsd, mDir } = makeBase();
  writeRoadmap(mDir, `# M001: History Test\n\n## Slices\n- [ ] **S01: Slice** \`risk:low\` \`depends:[]\`\n > After this: done\n`);
  writeSlice(mDir, "S01", "# S01: Slice\n\n**Goal:** G\n**Demo:** D\n\n## Tasks\n- [ ] **T01: Task** `est:10m`\n Pending.\n");
  await runGSDDoctor(base, { fix: false });
  assertTrue(existsSync(join(gsd, "doctor-history.jsonl")), "doctor-history.jsonl is created after run");
  // Imported dynamically to exercise the programmatic-access export.
  const { readDoctorHistory } = await import("../doctor.js");
  const entries = await readDoctorHistory(base);
  assertTrue(entries.length >= 1, "history has at least one entry");
  const first = entries[0];
  assertTrue(typeof first?.ts === "string", "history entry has ts field");
  assertTrue(typeof first?.ok === "boolean", "history entry has ok field");
  assertTrue(typeof first?.errors === "number", "history entry has errors count");
  assertTrue(Array.isArray(first?.codes), "history entry has codes array");
  rmSync(base, { recursive: true, force: true });
}
report();
}
// Entry point: run all scenarios; any unhandled failure exits non-zero.
void main().catch((err: unknown) => {
  console.error(err);
  process.exit(1);
});