refactor: split doctor-checks into focused modules
Each health check function (git, runtime, global, engine) moves to its own file with only the imports it needs. doctor-checks.ts becomes a re-export barrel for backward compatibility.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
parent ef5006e16d
commit b559651531
5 changed files with 1312 additions and 1298 deletions
File diff suppressed because it is too large
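The suppressed diff is presumably the rewritten doctor-checks.ts described in the commit message, since it is the only changed file not shown below. As a minimal sketch of what a backward-compatible re-export barrel could look like, assuming it simply re-exports the four check functions defined in the new modules (the actual export list in the suppressed diff may differ):

// doctor-checks.ts — hypothetical sketch of the re-export barrel, not the actual suppressed diff
export { checkEngineHealth } from "./doctor-engine-checks.js";
export { checkGitHealth } from "./doctor-git-checks.js";
export { checkGlobalHealth } from "./doctor-global-checks.js";
export { checkRuntimeHealth } from "./doctor-runtime-checks.js";

With a barrel like this, existing callers that import from doctor-checks.js keep working unchanged, which is the backward-compatibility guarantee the commit message refers to.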
src/resources/extensions/gsd/doctor-engine-checks.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
import { existsSync, statSync } from "node:fs";
import { join } from "node:path";

import type { DoctorIssue } from "./doctor-types.js";
import { isDbAvailable, _getAdapter } from "./gsd-db.js";
import { resolveMilestoneFile } from "./paths.js";
import { deriveState } from "./state.js";
import { readEvents } from "./workflow-events.js";
import { renderAllProjections } from "./workflow-projections.js";

export async function checkEngineHealth(
  basePath: string,
  issues: DoctorIssue[],
  fixesApplied: string[],
): Promise<void> {
  // ── DB constraint violation detection (full doctor only, not pre-dispatch per D-10) ──
  try {
    if (isDbAvailable()) {
      const adapter = _getAdapter()!;

      // a. Orphaned tasks (task.slice_id points to non-existent slice)
      try {
        const orphanedTasks = adapter
          .prepare(
            `SELECT t.id, t.slice_id, t.milestone_id
             FROM tasks t
             LEFT JOIN slices s ON t.milestone_id = s.milestone_id AND t.slice_id = s.id
             WHERE s.id IS NULL`,
          )
          .all() as Array<{ id: string; slice_id: string; milestone_id: string }>;

        for (const row of orphanedTasks) {
          issues.push({
            severity: "error",
            code: "db_orphaned_task",
            scope: "task",
            unitId: `${row.milestone_id}/${row.slice_id}/${row.id}`,
            message: `Task ${row.id} references slice ${row.slice_id} in milestone ${row.milestone_id} but no such slice exists in the database`,
            fixable: false,
          });
        }
      } catch {
        // Non-fatal — orphaned task check failed
      }

      // b. Orphaned slices (slice.milestone_id points to non-existent milestone)
      try {
        const orphanedSlices = adapter
          .prepare(
            `SELECT s.id, s.milestone_id
             FROM slices s
             LEFT JOIN milestones m ON s.milestone_id = m.id
             WHERE m.id IS NULL`,
          )
          .all() as Array<{ id: string; milestone_id: string }>;

        for (const row of orphanedSlices) {
          issues.push({
            severity: "error",
            code: "db_orphaned_slice",
            scope: "slice",
            unitId: `${row.milestone_id}/${row.id}`,
            message: `Slice ${row.id} references milestone ${row.milestone_id} but no such milestone exists in the database`,
            fixable: false,
          });
        }
      } catch {
        // Non-fatal — orphaned slice check failed
      }

      // c. Tasks marked complete without summaries
      try {
        const doneTasks = adapter
          .prepare(
            `SELECT id, slice_id, milestone_id FROM tasks
             WHERE status = 'done' AND (summary IS NULL OR summary = '')`,
          )
          .all() as Array<{ id: string; slice_id: string; milestone_id: string }>;

        for (const row of doneTasks) {
          issues.push({
            severity: "warning",
            code: "db_done_task_no_summary",
            scope: "task",
            unitId: `${row.milestone_id}/${row.slice_id}/${row.id}`,
            message: `Task ${row.id} is marked done but has no summary in the database`,
            fixable: false,
          });
        }
      } catch {
        // Non-fatal — done-task-no-summary check failed
      }

      // d. Duplicate entity IDs (safety check)
      try {
        const dupMilestones = adapter
          .prepare("SELECT id, COUNT(*) as cnt FROM milestones GROUP BY id HAVING cnt > 1")
          .all() as Array<{ id: string; cnt: number }>;
        for (const row of dupMilestones) {
          issues.push({
            severity: "error",
            code: "db_duplicate_id",
            scope: "milestone",
            unitId: row.id,
            message: `Duplicate milestone ID "${row.id}" appears ${row.cnt} times in the database`,
            fixable: false,
          });
        }

        const dupSlices = adapter
          .prepare("SELECT id, milestone_id, COUNT(*) as cnt FROM slices GROUP BY id, milestone_id HAVING cnt > 1")
          .all() as Array<{ id: string; milestone_id: string; cnt: number }>;
        for (const row of dupSlices) {
          issues.push({
            severity: "error",
            code: "db_duplicate_id",
            scope: "slice",
            unitId: `${row.milestone_id}/${row.id}`,
            message: `Duplicate slice ID "${row.id}" in milestone ${row.milestone_id} appears ${row.cnt} times`,
            fixable: false,
          });
        }

        const dupTasks = adapter
          .prepare("SELECT id, slice_id, milestone_id, COUNT(*) as cnt FROM tasks GROUP BY id, slice_id, milestone_id HAVING cnt > 1")
          .all() as Array<{ id: string; slice_id: string; milestone_id: string; cnt: number }>;
        for (const row of dupTasks) {
          issues.push({
            severity: "error",
            code: "db_duplicate_id",
            scope: "task",
            unitId: `${row.milestone_id}/${row.slice_id}/${row.id}`,
            message: `Duplicate task ID "${row.id}" in slice ${row.slice_id} appears ${row.cnt} times`,
            fixable: false,
          });
        }
      } catch {
        // Non-fatal — duplicate ID check failed
      }
    }
  } catch {
    // Non-fatal — DB constraint checks failed entirely
  }

  // ── Projection drift detection ──────────────────────────────────────────
  // If the DB is available, check whether markdown projections are stale
  // relative to the event log and re-render them.
  try {
    if (isDbAvailable()) {
      const eventLogPath = join(basePath, ".gsd", "event-log.jsonl");
      const events = readEvents(eventLogPath);
      if (events.length > 0) {
        const lastEventTs = new Date(events[events.length - 1]!.ts).getTime();
        const state = await deriveState(basePath);
        for (const milestone of state.registry) {
          if (milestone.status === "complete") continue;
          const roadmapPath = resolveMilestoneFile(basePath, milestone.id, "ROADMAP");
          if (!roadmapPath || !existsSync(roadmapPath)) {
            try {
              await renderAllProjections(basePath, milestone.id);
              fixesApplied.push(`re-rendered missing projections for ${milestone.id}`);
            } catch {
              // Non-fatal — projection re-render failed
            }
            continue;
          }
          const projectionMtime = statSync(roadmapPath).mtimeMs;
          if (lastEventTs > projectionMtime) {
            try {
              await renderAllProjections(basePath, milestone.id);
              fixesApplied.push(`re-rendered stale projections for ${milestone.id}`);
            } catch {
              // Non-fatal — projection re-render failed
            }
          }
        }
      }
    }
  } catch {
    // Non-fatal — projection drift check must never block doctor
  }
}
src/resources/extensions/gsd/doctor-git-checks.ts (new file, 415 lines)
@@ -0,0 +1,415 @@
import { existsSync, readdirSync, realpathSync, rmSync, statSync } from "node:fs";
import { join, sep } from "node:path";

import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import { loadFile } from "./files.js";
import { parseRoadmap as parseLegacyRoadmap } from "./parsers-legacy.js";
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
import { resolveMilestoneFile } from "./paths.js";
import { deriveState, isMilestoneComplete } from "./state.js";
import { listWorktrees, resolveGitDir, worktreesDir } from "./worktree-manager.js";
import { abortAndReset } from "./git-self-heal.js";
import { RUNTIME_EXCLUSION_PATHS, resolveMilestoneIntegrationBranch, writeIntegrationBranch } from "./git-service.js";
import { nativeIsRepo, nativeWorktreeList, nativeWorktreeRemove, nativeBranchList, nativeBranchDelete, nativeLsFiles, nativeRmCached } from "./native-git-bridge.js";
import { getAllWorktreeHealth } from "./worktree-health.js";
import { loadEffectiveGSDPreferences } from "./preferences.js";

export async function checkGitHealth(
  basePath: string,
  issues: DoctorIssue[],
  fixesApplied: string[],
  shouldFix: (code: DoctorIssueCode) => boolean,
  isolationMode: "none" | "worktree" | "branch" = "none",
): Promise<void> {
  // Degrade gracefully if not a git repo
  if (!nativeIsRepo(basePath)) {
    return; // Not a git repo — skip all git health checks
  }

  const gitDir = resolveGitDir(basePath);

  // ── Orphaned auto-worktrees & Stale milestone branches ────────────────
  // These checks only apply in worktree/branch modes — skip in none mode
  // where no milestone worktrees or branches are created.
  if (isolationMode !== "none") {
    try {
      const worktrees = listWorktrees(basePath);
      const milestoneWorktrees = worktrees.filter(wt => wt.branch.startsWith("milestone/"));

      // Load roadmap state once for cross-referencing
      const state = await deriveState(basePath);

      for (const wt of milestoneWorktrees) {
        // Extract milestone ID from branch name "milestone/M001" → "M001"
        const milestoneId = wt.branch.replace(/^milestone\//, "");
        const milestoneEntry = state.registry.find(m => m.id === milestoneId);

        // Check if milestone is complete via roadmap
        let isComplete = false;
        if (milestoneEntry) {
          if (isDbAvailable()) {
            const dbSlices = getMilestoneSlices(milestoneId);
            isComplete = dbSlices.length > 0 && dbSlices.every(s => s.status === "complete");
          } else {
            const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
            const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
            if (roadmapContent) {
              const roadmap = parseLegacyRoadmap(roadmapContent);
              isComplete = isMilestoneComplete(roadmap);
            }
          }
          // When DB unavailable and no roadmap, isComplete stays false
        }

        if (isComplete) {
          issues.push({
            severity: "warning",
            code: "orphaned_auto_worktree",
            scope: "milestone",
            unitId: milestoneId,
            message: `Worktree for completed milestone ${milestoneId} still exists at ${wt.path}`,
            fixable: true,
          });

          if (shouldFix("orphaned_auto_worktree")) {
            // If cwd is inside the worktree, chdir out first — matching the
            // pattern in removeWorktree() (#1946). Without this, git cannot
            // remove the worktree and the doctor enters a deadlock where it
            // detects the orphan every run but never cleans it up.
            const cwd = process.cwd();
            if (wt.path === cwd || cwd.startsWith(wt.path + sep)) {
              try {
                process.chdir(basePath);
              } catch {
                fixesApplied.push(`skipped removing worktree at ${wt.path} (cannot chdir to basePath)`);
                continue;
              }
            }
            try {
              nativeWorktreeRemove(basePath, wt.path, true);
              fixesApplied.push(`removed orphaned worktree ${wt.path}`);
            } catch {
              fixesApplied.push(`failed to remove worktree ${wt.path}`);
            }
          }
        }
      }

      // ── Stale milestone branches ─────────────────────────────────────────
      try {
        const branches = nativeBranchList(basePath, "milestone/*");
        if (branches.length > 0) {
          const worktreeBranches = new Set(milestoneWorktrees.map(wt => wt.branch));

          for (const branch of branches) {
            // Skip branches that have a worktree (handled above)
            if (worktreeBranches.has(branch)) continue;

            const milestoneId = branch.replace(/^milestone\//, "");
            const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
            let branchMilestoneComplete = false;
            if (isDbAvailable()) {
              const dbSlices = getMilestoneSlices(milestoneId);
              branchMilestoneComplete = dbSlices.length > 0 && dbSlices.every(s => s.status === "complete");
            } else {
              const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
              if (!roadmapContent) continue;
              const roadmap = parseLegacyRoadmap(roadmapContent);
              branchMilestoneComplete = isMilestoneComplete(roadmap);
            }
            if (branchMilestoneComplete) {
              issues.push({
                severity: "info",
                code: "stale_milestone_branch",
                scope: "milestone",
                unitId: milestoneId,
                message: `Branch ${branch} exists for completed milestone ${milestoneId}`,
                fixable: true,
              });

              if (shouldFix("stale_milestone_branch")) {
                try {
                  nativeBranchDelete(basePath, branch, true);
                  fixesApplied.push(`deleted stale branch ${branch}`);
                } catch {
                  fixesApplied.push(`failed to delete branch ${branch}`);
                }
              }
            }
          }
        }
      } catch {
        // git branch list failed — skip stale branch check
      }
    } catch {
      // listWorktrees or deriveState failed — skip worktree/branch checks
    }
  } // end isolationMode !== "none"

  // ── Corrupt merge state ────────────────────────────────────────────────
  try {
    const mergeStateFiles = ["MERGE_HEAD", "SQUASH_MSG"];
    const mergeStateDirs = ["rebase-apply", "rebase-merge"];
    const found: string[] = [];

    for (const f of mergeStateFiles) {
      if (existsSync(join(gitDir, f))) found.push(f);
    }
    for (const d of mergeStateDirs) {
      if (existsSync(join(gitDir, d))) found.push(d);
    }

    if (found.length > 0) {
      issues.push({
        severity: "error",
        code: "corrupt_merge_state",
        scope: "project",
        unitId: "project",
        message: `Corrupt merge/rebase state detected: ${found.join(", ")}`,
        fixable: true,
      });

      if (shouldFix("corrupt_merge_state")) {
        const result = abortAndReset(basePath);
        fixesApplied.push(`cleaned merge state: ${result.cleaned.join(", ")}`);
      }
    }
  } catch {
    // Can't check .git dir — skip
  }

  // ── Tracked runtime files ──────────────────────────────────────────────
  try {
    const trackedPaths: string[] = [];
    for (const exclusion of RUNTIME_EXCLUSION_PATHS) {
      try {
        const files = nativeLsFiles(basePath, exclusion);
        if (files.length > 0) {
          trackedPaths.push(...files);
        }
      } catch {
        // Individual ls-files can fail — continue
      }
    }

    if (trackedPaths.length > 0) {
      issues.push({
        severity: "warning",
        code: "tracked_runtime_files",
        scope: "project",
        unitId: "project",
        message: `${trackedPaths.length} runtime file(s) are tracked by git: ${trackedPaths.slice(0, 5).join(", ")}${trackedPaths.length > 5 ? "..." : ""}`,
        fixable: true,
      });

      if (shouldFix("tracked_runtime_files")) {
        try {
          for (const exclusion of RUNTIME_EXCLUSION_PATHS) {
            nativeRmCached(basePath, [exclusion]);
          }
          fixesApplied.push(`untracked ${trackedPaths.length} runtime file(s)`);
        } catch {
          fixesApplied.push("failed to untrack runtime files");
        }
      }
    }
  } catch {
    // git ls-files failed — skip
  }

  // ── Legacy slice branches ──────────────────────────────────────────────
  try {
    const branchList = nativeBranchList(basePath, "gsd/*/*")
      .filter((branch) => !branch.startsWith("gsd/quick/"));
    if (branchList.length > 0) {
      issues.push({
        severity: "info",
        code: "legacy_slice_branches",
        scope: "project",
        unitId: "project",
        message: `${branchList.length} legacy slice branch(es) found: ${branchList.slice(0, 3).join(", ")}${branchList.length > 3 ? "..." : ""}. These are no longer used (branchless architecture).`,
        fixable: true,
      });

      if (shouldFix("legacy_slice_branches")) {
        let deleted = 0;
        for (const branch of branchList) {
          try {
            nativeBranchDelete(basePath, branch, true);
            deleted++;
          } catch { /* skip branches that can't be deleted */ }
        }
        if (deleted > 0) {
          fixesApplied.push(`deleted ${deleted} legacy slice branch(es)`);
        }
      }
    }
  } catch {
    // git branch list failed — skip
  }

  // ── Integration branch existence ──────────────────────────────────────
  // For each active (non-complete) milestone, verify the stored integration
  // branch still exists in git. A missing integration branch blocks merge-back
  // and causes the next merge operation to fail silently.
  try {
    const state = await deriveState(basePath);
    const gitPrefs = loadEffectiveGSDPreferences()?.preferences?.git ?? {};
    for (const milestone of state.registry) {
      if (milestone.status === "complete") continue;
      const resolution = resolveMilestoneIntegrationBranch(basePath, milestone.id, gitPrefs);
      if (!resolution.recordedBranch) continue; // No stored branch — skip (not yet set)
      if (resolution.status === "fallback" && resolution.effectiveBranch) {
        issues.push({
          severity: "warning",
          code: "integration_branch_missing",
          scope: "milestone",
          unitId: milestone.id,
          message: resolution.reason,
          fixable: true,
        });
        if (shouldFix("integration_branch_missing")) {
          writeIntegrationBranch(basePath, milestone.id, resolution.effectiveBranch);
          fixesApplied.push(`updated integration branch for ${milestone.id} to "${resolution.effectiveBranch}"`);
        }
        continue;
      }

      if (resolution.status === "missing") {
        issues.push({
          severity: "error",
          code: "integration_branch_missing",
          scope: "milestone",
          unitId: milestone.id,
          message: resolution.reason,
          fixable: false,
        });
      }
    }
  } catch {
    // Non-fatal — integration branch check failed
  }

  // ── Orphaned worktree directories ────────────────────────────────────
  // Worktree removal can fail after a branch delete, leaving a directory
  // that is no longer registered with git. These orphaned dirs cause
  // "already exists" errors when re-creating the same worktree name.
  try {
    const wtDir = worktreesDir(basePath);
    if (existsSync(wtDir)) {
      // Resolve symlinks and normalize separators so that symlinked .gsd
      // paths (e.g. ~/.gsd/projects/<hash>/worktrees/…) match the paths
      // returned by `git worktree list`.
      const normalizePath = (p: string): string => {
        try { p = realpathSync(p); } catch { /* path may not exist */ }
        return p.replaceAll("\\", "/");
      };
      const registeredPaths = new Set(
        nativeWorktreeList(basePath).map(entry => normalizePath(entry.path)),
      );
      for (const entry of readdirSync(wtDir)) {
        const fullPath = join(wtDir, entry);
        try {
          if (!statSync(fullPath).isDirectory()) continue;
        } catch { continue; }
        const normalizedFullPath = normalizePath(fullPath);
        if (!registeredPaths.has(normalizedFullPath)) {
          issues.push({
            severity: "warning",
            code: "worktree_directory_orphaned",
            scope: "project",
            unitId: entry,
            message: `Worktree directory ${fullPath} exists on disk but is not registered with git. Run "git worktree prune" or doctor --fix to remove it.`,
            fixable: true,
          });
          if (shouldFix("worktree_directory_orphaned")) {
            try {
              rmSync(fullPath, { recursive: true, force: true });
              fixesApplied.push(`removed orphaned worktree directory ${fullPath}`);
            } catch {
              fixesApplied.push(`failed to remove orphaned worktree directory ${fullPath}`);
            }
          }
        }
      }
    }
  } catch {
    // Non-fatal — orphaned worktree directory check failed
  }

  // ── Worktree lifecycle checks ──────────────────────────────────────────
  // Check GSD-managed worktrees for: merged branches, stale work, dirty
  // state, and unpushed commits. Only worktrees under .gsd/worktrees/.
  try {
    const healthStatuses = getAllWorktreeHealth(basePath);
    const cwd = process.cwd();

    for (const health of healthStatuses) {
      const wt = health.worktree;
      const isCwd = wt.path === cwd || cwd.startsWith(wt.path + sep);

      // Branch fully merged into main — safe to remove
      if (health.mergedIntoMain) {
        issues.push({
          severity: "info",
          code: "worktree_branch_merged",
          scope: "project",
          unitId: wt.name,
          message: `Worktree "${wt.name}" (branch ${wt.branch}) is fully merged into main${health.safeToRemove ? " — safe to remove" : ""}`,
          fixable: health.safeToRemove,
        });

        if (health.safeToRemove && shouldFix("worktree_branch_merged") && !isCwd) {
          try {
            const { removeWorktree } = await import("./worktree-manager.js");
            removeWorktree(basePath, wt.name, { deleteBranch: true, branch: wt.branch });
            fixesApplied.push(`removed merged worktree "${wt.name}" and deleted branch ${wt.branch}`);
          } catch {
            fixesApplied.push(`failed to remove merged worktree "${wt.name}"`);
          }
        }
        // If merged, skip the stale/dirty/unpushed checks — they're irrelevant
        continue;
      }

      // Stale: no commits in N days, not merged
      if (health.stale) {
        const days = Math.floor(health.lastCommitAgeDays);
        issues.push({
          severity: "warning",
          code: "worktree_stale",
          scope: "project",
          unitId: wt.name,
          message: `Worktree "${wt.name}" has had no commits in ${days} day${days === 1 ? "" : "s"}`,
          fixable: false,
        });
      }

      // Dirty: uncommitted changes in a worktree (only flag on stale worktrees to avoid noise)
      if (health.dirty && health.stale) {
        issues.push({
          severity: "warning",
          code: "worktree_dirty",
          scope: "project",
          unitId: wt.name,
          message: `Worktree "${wt.name}" has ${health.dirtyFileCount} uncommitted file${health.dirtyFileCount === 1 ? "" : "s"} and is stale`,
          fixable: false,
        });
      }

      // Unpushed: commits not on any remote (only flag on stale worktrees to avoid noise)
      if (health.unpushedCommits > 0 && health.stale) {
        issues.push({
          severity: "warning",
          code: "worktree_unpushed",
          scope: "project",
          unitId: wt.name,
          message: `Worktree "${wt.name}" has ${health.unpushedCommits} unpushed commit${health.unpushedCommits === 1 ? "" : "s"}`,
          fixable: false,
        });
      }
    }
  } catch {
    // Non-fatal — worktree lifecycle check failed
  }
}
src/resources/extensions/gsd/doctor-global-checks.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import { existsSync, readdirSync, rmSync } from "node:fs";
import { join } from "node:path";

import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import { readRepoMeta, externalProjectsRoot } from "./repo-identity.js";

/**
 * Check for orphaned project state directories in ~/.gsd/projects/.
 *
 * A project directory is orphaned when its recorded gitRoot no longer exists
 * on disk — the repo was deleted, moved, or the external drive was unmounted.
 * These directories accumulate silently and waste disk space.
 *
 * Severity: info — orphaned state is harmless but takes disk space.
 * Fixable: yes — rmSync the directory. Never auto-fixed at fixLevel="task".
 */
export async function checkGlobalHealth(
  issues: DoctorIssue[],
  fixesApplied: string[],
  shouldFix: (code: DoctorIssueCode) => boolean,
): Promise<void> {
  try {
    const projectsDir = externalProjectsRoot();

    if (!existsSync(projectsDir)) return;

    let entries: string[];
    try {
      entries = readdirSync(projectsDir, { withFileTypes: true })
        .filter(e => e.isDirectory())
        .map(e => e.name);
    } catch {
      return; // Can't read directory — skip
    }

    if (entries.length === 0) return;

    const orphaned: Array<{ hash: string; gitRoot: string; remoteUrl: string }> = [];
    let unknownCount = 0;

    for (const hash of entries) {
      const dirPath = join(projectsDir, hash);
      const meta = readRepoMeta(dirPath);
      if (!meta) {
        unknownCount++;
        continue;
      }
      if (!existsSync(meta.gitRoot)) {
        orphaned.push({ hash, gitRoot: meta.gitRoot, remoteUrl: meta.remoteUrl });
      }
    }

    if (orphaned.length === 0) return;

    const labels = orphaned.slice(0, 3).map(o => o.gitRoot).join(", ");
    const overflow = orphaned.length > 3 ? ` (+${orphaned.length - 3} more)` : "";
    const unknownNote = unknownCount > 0 ? ` — ${unknownCount} additional director${unknownCount === 1 ? "y" : "ies"} have no metadata yet (open those repos once to register them)` : "";

    issues.push({
      severity: "info",
      code: "orphaned_project_state",
      scope: "project",
      unitId: "global",
      message: `${orphaned.length} orphaned GSD project state director${orphaned.length === 1 ? "y" : "ies"} in ${projectsDir} whose git root no longer exists: ${labels}${overflow}${unknownNote}. Run /gsd cleanup projects to audit or /gsd cleanup projects --fix to reclaim disk space.`,
      file: projectsDir,
      fixable: true,
    });

    if (shouldFix("orphaned_project_state")) {
      let removed = 0;
      for (const { hash } of orphaned) {
        try {
          rmSync(join(projectsDir, hash), { recursive: true, force: true });
          removed++;
        } catch {
          // Individual removal failure is non-fatal — continue with remaining
        }
      }
      fixesApplied.push(`removed ${removed} orphaned project state director${removed === 1 ? "y" : "ies"} from ${projectsDir}`);
    }
  } catch {
    // Non-fatal — global health check must not block per-project doctor
  }
}
src/resources/extensions/gsd/doctor-runtime-checks.ts (new file, 626 lines)
@@ -0,0 +1,626 @@
import { existsSync, lstatSync, readdirSync, readFileSync, realpathSync, rmSync, statSync } from "node:fs";
import { basename, dirname, join } from "node:path";

import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import { cleanNumberedGsdVariants } from "./repo-identity.js";
import { milestonesDir, gsdRoot, resolveGsdRootFile } from "./paths.js";
import { deriveState } from "./state.js";
import { saveFile } from "./files.js";
import { nativeIsRepo, nativeForEachRef, nativeUpdateRef } from "./native-git-bridge.js";
import { readCrashLock, isLockProcessAlive, clearLock } from "./crash-recovery.js";
import { ensureGitignore } from "./gitignore.js";
import { readAllSessionStatuses, isSessionStale, removeSessionStatus } from "./session-status-io.js";
import { recoverFailedMigration } from "./migrate-external.js";

export async function checkRuntimeHealth(
  basePath: string,
  issues: DoctorIssue[],
  fixesApplied: string[],
  shouldFix: (code: DoctorIssueCode) => boolean,
): Promise<void> {
  const root = gsdRoot(basePath);

  // ── Stale crash lock ──────────────────────────────────────────────────
  try {
    const lock = readCrashLock(basePath);
    if (lock) {
      const alive = isLockProcessAlive(lock);
      if (!alive) {
        issues.push({
          severity: "error",
          code: "stale_crash_lock",
          scope: "project",
          unitId: "project",
          message: `Stale auto.lock from PID ${lock.pid} (started ${lock.startedAt}, was executing ${lock.unitType} ${lock.unitId}) — process is no longer running`,
          file: ".gsd/auto.lock",
          fixable: true,
        });

        if (shouldFix("stale_crash_lock")) {
          clearLock(basePath);
          fixesApplied.push("cleared stale auto.lock");
        }
      }
    }
  } catch {
    // Non-fatal — crash lock check failed
  }

  // ── Stranded lock directory ────────────────────────────────────────────
  // proper-lockfile creates a `.gsd.lock/` directory as the OS-level lock
  // mechanism. If the process was SIGKILLed or crashed hard, this directory
  // can remain on disk without any live process holding it. The next session
  // fails to acquire the lock until the directory is removed (#1245).
  try {
    const lockDir = join(dirname(root), `${basename(root)}.lock`);
    if (existsSync(lockDir)) {
      const statRes = statSync(lockDir);
      if (statRes.isDirectory()) {
        // Check if any live process actually holds this lock
        const lock = readCrashLock(basePath);
        const lockHolderAlive = lock ? isLockProcessAlive(lock) : false;
        if (!lockHolderAlive) {
          issues.push({
            severity: "error",
            code: "stranded_lock_directory",
            scope: "project",
            unitId: "project",
            message: `Stranded lock directory "${lockDir}" exists but no live process holds the session lock. This blocks new auto-mode sessions from starting.`,
            file: lockDir,
            fixable: true,
          });
          if (shouldFix("stranded_lock_directory")) {
            try {
              rmSync(lockDir, { recursive: true, force: true });
              fixesApplied.push(`removed stranded lock directory ${lockDir}`);
            } catch {
              fixesApplied.push(`failed to remove stranded lock directory ${lockDir}`);
            }
          }
        }
      }
    }
  } catch {
    // Non-fatal — stranded lock directory check failed
  }

  // ── Stale parallel sessions ────────────────────────────────────────────
  try {
    const parallelStatuses = readAllSessionStatuses(basePath);
    for (const status of parallelStatuses) {
      if (isSessionStale(status)) {
        issues.push({
          severity: "warning",
          code: "stale_parallel_session",
          scope: "project",
          unitId: status.milestoneId,
          message: `Stale parallel session for ${status.milestoneId} (PID ${status.pid}, started ${new Date(status.startedAt).toISOString()}, last heartbeat ${new Date(status.lastHeartbeat).toISOString()}) — process is no longer running`,
          file: `.gsd/parallel/${status.milestoneId}.status.json`,
          fixable: true,
        });

        if (shouldFix("stale_parallel_session")) {
          removeSessionStatus(basePath, status.milestoneId);
          fixesApplied.push(`cleaned up stale parallel session for ${status.milestoneId}`);
        }
      }
    }
  } catch {
    // Non-fatal — parallel session check failed
  }

  // ── Orphaned completed-units keys ─────────────────────────────────────
  try {
    const completedKeysFile = join(root, "completed-units.json");
    if (existsSync(completedKeysFile)) {
      const raw = readFileSync(completedKeysFile, "utf-8");
      const keys: string[] = JSON.parse(raw);
      const orphaned: string[] = [];

      for (const key of keys) {
        // Key format: "unitType/unitId" e.g. "execute-task/M001/S01/T01"
        const slashIdx = key.indexOf("/");
        if (slashIdx === -1) continue;
        const unitType = key.slice(0, slashIdx);
        const unitId = key.slice(slashIdx + 1);

        // Only validate artifact-producing unit types
        const { verifyExpectedArtifact } = await import("./auto-recovery.js");
        if (!verifyExpectedArtifact(unitType, unitId, basePath)) {
          orphaned.push(key);
        }
      }

      if (orphaned.length > 0) {
        issues.push({
          severity: "warning",
          code: "orphaned_completed_units",
          scope: "project",
          unitId: "project",
          message: `${orphaned.length} completed-unit key(s) reference missing artifacts: ${orphaned.slice(0, 3).join(", ")}${orphaned.length > 3 ? "..." : ""}`,
          file: ".gsd/completed-units.json",
          fixable: true,
        });

        if (shouldFix("orphaned_completed_units")) {
          const orphanedSet = new Set(orphaned);
          const remaining = keys.filter((key) => !orphanedSet.has(key));
          await saveFile(completedKeysFile, JSON.stringify(remaining));
          fixesApplied.push(`removed ${orphaned.length} orphaned completed-unit key(s)`);
        }
      }
    }
  } catch {
    // Non-fatal — completed-units check failed
  }

  // ── Stale hook state ──────────────────────────────────────────────────
  try {
    const hookStateFile = join(root, "hook-state.json");
    if (existsSync(hookStateFile)) {
      const raw = readFileSync(hookStateFile, "utf-8");
      const state = JSON.parse(raw);
      const hasCycleCounts = state.cycleCounts && typeof state.cycleCounts === "object"
        && Object.keys(state.cycleCounts).length > 0;

      // Only flag if there are actual cycle counts AND no auto-mode is running
      if (hasCycleCounts) {
        const lock = readCrashLock(basePath);
        const autoRunning = lock ? isLockProcessAlive(lock) : false;

        if (!autoRunning) {
          issues.push({
            severity: "info",
            code: "stale_hook_state",
            scope: "project",
            unitId: "project",
            message: `hook-state.json has ${Object.keys(state.cycleCounts).length} residual cycle count(s) from a previous session`,
            file: ".gsd/hook-state.json",
            fixable: true,
          });

          if (shouldFix("stale_hook_state")) {
            const { clearPersistedHookState } = await import("./post-unit-hooks.js");
            clearPersistedHookState(basePath);
            fixesApplied.push("cleared stale hook-state.json");
          }
        }
      }
    }
  } catch {
    // Non-fatal — hook state check failed
  }

  // ── Activity log bloat ────────────────────────────────────────────────
  try {
    const activityDir = join(root, "activity");
    if (existsSync(activityDir)) {
      const files = readdirSync(activityDir);
      let totalSize = 0;
      for (const f of files) {
        try {
          totalSize += statSync(join(activityDir, f)).size;
        } catch {
          // stat failed — skip
        }
      }

      const totalMB = totalSize / (1024 * 1024);
      const BLOAT_FILE_THRESHOLD = 500;
      const BLOAT_SIZE_MB = 100;

      if (files.length > BLOAT_FILE_THRESHOLD || totalMB > BLOAT_SIZE_MB) {
        issues.push({
          severity: "warning",
          code: "activity_log_bloat",
          scope: "project",
          unitId: "project",
          message: `Activity logs: ${files.length} files, ${totalMB.toFixed(1)}MB (thresholds: ${BLOAT_FILE_THRESHOLD} files / ${BLOAT_SIZE_MB}MB)`,
          file: ".gsd/activity/",
          fixable: true,
        });

        if (shouldFix("activity_log_bloat")) {
          const { pruneActivityLogs } = await import("./activity-log.js");
          pruneActivityLogs(activityDir, 7); // 7-day retention
          fixesApplied.push("pruned activity logs (7-day retention)");
        }
      }
    }
  } catch {
    // Non-fatal — activity log check failed
  }

  // ── STATE.md health ───────────────────────────────────────────────────
  try {
    const stateFilePath = resolveGsdRootFile(basePath, "STATE");
    const milestonesPath = milestonesDir(basePath);

    if (existsSync(milestonesPath)) {
      if (!existsSync(stateFilePath)) {
        issues.push({
          severity: "warning",
          code: "state_file_missing",
          scope: "project",
          unitId: "project",
          message: "STATE.md is missing — state display will not work",
          file: ".gsd/STATE.md",
          fixable: true,
        });

        if (shouldFix("state_file_missing")) {
          const state = await deriveState(basePath);
          await saveFile(stateFilePath, buildStateMarkdownForCheck(state));
          fixesApplied.push("created STATE.md from derived state");
        }
      } else {
        // Check if STATE.md is stale by comparing active milestone/slice/phase
        const currentContent = readFileSync(stateFilePath, "utf-8");
        const state = await deriveState(basePath);
        const freshContent = buildStateMarkdownForCheck(state);

        // Extract key fields for comparison — don't compare full content
        // since timestamp/formatting differences are normal
        const extractFields = (content: string) => {
          const milestone = content.match(/\*\*Active Milestone:\*\*\s*(.+)/)?.[1]?.trim() ?? "";
          const slice = content.match(/\*\*Active Slice:\*\*\s*(.+)/)?.[1]?.trim() ?? "";
          const phase = content.match(/\*\*Phase:\*\*\s*(.+)/)?.[1]?.trim() ?? "";
          return { milestone, slice, phase };
        };

        const current = extractFields(currentContent);
        const fresh = extractFields(freshContent);

        if (current.milestone !== fresh.milestone || current.slice !== fresh.slice || current.phase !== fresh.phase) {
          issues.push({
            severity: "warning",
            code: "state_file_stale",
            scope: "project",
            unitId: "project",
            message: `STATE.md is stale — shows "${current.phase}" but derived state is "${fresh.phase}"`,
            file: ".gsd/STATE.md",
            fixable: true,
          });

          if (shouldFix("state_file_stale")) {
            await saveFile(stateFilePath, freshContent);
            fixesApplied.push("rebuilt STATE.md from derived state");
          }
        }
      }
    }
  } catch {
    // Non-fatal — STATE.md check failed
  }

  // ── Gitignore drift ───────────────────────────────────────────────────
  try {
    const gitignorePath = join(basePath, ".gitignore");
    if (existsSync(gitignorePath) && nativeIsRepo(basePath)) {
      const content = readFileSync(gitignorePath, "utf-8");
      const existingLines = new Set(
        content.split("\n").map(l => l.trim()).filter(l => l && !l.startsWith("#")),
      );

      // Check for critical runtime patterns that must be present
      const criticalPatterns = [
        ".gsd/activity/",
        ".gsd/runtime/",
        ".gsd/auto.lock",
        ".gsd/gsd.db",
        ".gsd/completed-units.json",
      ];

      // If blanket .gsd/ or .gsd is present, all patterns are covered
      const hasBlanketIgnore = existingLines.has(".gsd/") || existingLines.has(".gsd");

      if (!hasBlanketIgnore) {
        const missing = criticalPatterns.filter(p => !existingLines.has(p));
        if (missing.length > 0) {
          issues.push({
            severity: "warning",
            code: "gitignore_missing_patterns",
            scope: "project",
            unitId: "project",
            message: `${missing.length} critical GSD runtime pattern(s) missing from .gitignore: ${missing.join(", ")}`,
            file: ".gitignore",
            fixable: true,
          });

          if (shouldFix("gitignore_missing_patterns")) {
            ensureGitignore(basePath);
            fixesApplied.push("added missing GSD runtime patterns to .gitignore");
          }
        }
      }
    }
  } catch {
    // Non-fatal — gitignore check failed
  }

  // ── External state symlink health ──────────────────────────────────────
  try {
    const localGsd = join(basePath, ".gsd");
    if (existsSync(localGsd)) {
      const stat = lstatSync(localGsd);

      // Check for .gsd.migrating (failed migration)
      const migratingPath = join(basePath, ".gsd.migrating");
      if (existsSync(migratingPath)) {
        issues.push({
          severity: "error",
          code: "failed_migration",
          scope: "project",
          unitId: "project",
          message: "Found .gsd.migrating — a previous external state migration failed. State may be incomplete.",
          file: ".gsd.migrating",
          fixable: true,
        });

        if (shouldFix("failed_migration")) {
          if (recoverFailedMigration(basePath)) {
            fixesApplied.push("recovered failed migration (.gsd.migrating → .gsd)");
          }
        }
      }

      // Check symlink target exists
      if (stat.isSymbolicLink()) {
        try {
          realpathSync(localGsd);
        } catch {
          issues.push({
            severity: "error",
            code: "broken_symlink",
            scope: "project",
            unitId: "project",
            message: ".gsd symlink target does not exist. External state directory may have been deleted.",
            file: ".gsd",
            fixable: false,
          });
        }
      }
    }
  } catch {
    // Non-fatal — external state check failed
  }

  // ── Numbered .gsd collision variants (#2205) ───────────────────────────
  // macOS APFS can create ".gsd 2", ".gsd 3" etc. when a directory blocks
  // symlink creation. These must be removed so the canonical .gsd is used.
  try {
    const variantPattern = /^\.gsd \d+$/;
    const entries = readdirSync(basePath);
    const variants = entries.filter(e => variantPattern.test(e));
    if (variants.length > 0) {
      for (const v of variants) {
        issues.push({
          severity: "warning",
          code: "numbered_gsd_variant",
          scope: "project",
          unitId: "project",
          message: `Found macOS collision variant "${v}" — this can cause GSD state to appear deleted.`,
          file: v,
          fixable: true,
        });
      }

      if (shouldFix("numbered_gsd_variant")) {
        const removed = cleanNumberedGsdVariants(basePath);
        for (const name of removed) {
          fixesApplied.push(`removed numbered .gsd variant: ${name}`);
        }
      }
    }
  } catch {
    // Non-fatal — variant check failed
  }

  // ── Metrics ledger integrity ───────────────────────────────────────────
  try {
    const metricsPath = join(root, "metrics.json");
    if (existsSync(metricsPath)) {
      try {
        const raw = readFileSync(metricsPath, "utf-8");
        const ledger = JSON.parse(raw);
        if (ledger.version !== 1 || !Array.isArray(ledger.units)) {
          issues.push({
            severity: "warning",
            code: "metrics_ledger_corrupt",
            scope: "project",
            unitId: "project",
            message: "metrics.json has an unexpected structure (version !== 1 or units is not an array) — metrics data may be unreliable",
            file: ".gsd/metrics.json",
            fixable: false,
          });
        }
      } catch {
        issues.push({
          severity: "warning",
          code: "metrics_ledger_corrupt",
          scope: "project",
          unitId: "project",
          message: "metrics.json is not valid JSON — metrics data may be corrupt",
          file: ".gsd/metrics.json",
          fixable: false,
        });
      }
    }
  } catch {
    // Non-fatal — metrics check failed
  }

  // ── Metrics ledger bloat ──────────────────────────────────────────────
  // The metrics ledger has no TTL and grows by one entry per completed unit.
  // At 50 units/day a project can accumulate tens of thousands of entries over
  // months of use. Prune to the newest 1500 when the threshold is exceeded.
  try {
    const metricsFilePath = join(root, "metrics.json");
    if (existsSync(metricsFilePath)) {
      try {
        const raw = readFileSync(metricsFilePath, "utf-8");
        const parsed = JSON.parse(raw);
        const BLOAT_UNITS_THRESHOLD = 2000;
        if (parsed.version === 1 && Array.isArray(parsed.units) && parsed.units.length > BLOAT_UNITS_THRESHOLD) {
          const fileSizeMB = (statSync(metricsFilePath).size / (1024 * 1024)).toFixed(1);
          issues.push({
            severity: "warning",
            code: "metrics_ledger_bloat",
            scope: "project",
            unitId: "project",
            message: `metrics.json has ${parsed.units.length} unit entries (${fileSizeMB}MB) — threshold is ${BLOAT_UNITS_THRESHOLD}. Run /gsd doctor --fix to prune to the newest 1500 entries.`,
            file: ".gsd/metrics.json",
            fixable: true,
          });
          if (shouldFix("metrics_ledger_bloat")) {
            const { pruneMetricsLedger } = await import("./metrics.js");
            const removed = pruneMetricsLedger(basePath, 1500);
            fixesApplied.push(`pruned metrics ledger: removed ${removed} oldest entries (${parsed.units.length - removed} remain)`);
          }
        }
      } catch {
        // JSON parse failed — already handled by the integrity check above
      }
    }
  } catch {
    // Non-fatal — metrics bloat check failed
  }

  // ── Large planning file detection ──────────────────────────────────────
  // Files over 100KB can cause LLM context pressure. Report the worst offenders.
  try {
    const MAX_FILE_BYTES = 100 * 1024; // 100KB
    const milestonesPath = milestonesDir(basePath);
    if (existsSync(milestonesPath)) {
      const largeFiles: Array<{ path: string; sizeKB: number }> = [];
      function scanForLargeFiles(dir: string, depth = 0): void {
        if (depth > 6) return;
        try {
          for (const entry of readdirSync(dir)) {
            const full = join(dir, entry);
            try {
              const s = statSync(full);
              if (s.isDirectory()) { scanForLargeFiles(full, depth + 1); continue; }
              if (entry.endsWith(".md") && s.size > MAX_FILE_BYTES) {
                largeFiles.push({ path: full.replace(basePath + "/", ""), sizeKB: Math.round(s.size / 1024) });
              }
            } catch { /* skip entry */ }
          }
        } catch { /* skip dir */ }
      }
      scanForLargeFiles(milestonesPath);
      if (largeFiles.length > 0) {
        largeFiles.sort((a, b) => b.sizeKB - a.sizeKB);
        const worst = largeFiles[0]!;
        issues.push({
          severity: "warning",
          code: "large_planning_file",
          scope: "project",
          unitId: "project",
          message: `${largeFiles.length} planning file(s) exceed 100KB — largest: ${worst.path} (${worst.sizeKB}KB). Large files cause LLM context pressure.`,
          file: worst.path,
          fixable: false,
        });
      }
    }
  } catch {
    // Non-fatal — large file scan failed
  }

  // ── Snapshot ref bloat ────────────────────────────────────────────────
  // refs/gsd/snapshots/ accumulate over time. Prune to newest 5 per label
  // when total count exceeds threshold.
  try {
    if (nativeIsRepo(basePath)) {
      const refs = nativeForEachRef(basePath, "refs/gsd/snapshots/");
      if (refs.length > 50) {
        issues.push({
          severity: "warning",
          code: "snapshot_ref_bloat",
          scope: "project",
          unitId: "project",
          message: `${refs.length} snapshot refs found under refs/gsd/snapshots/ — pruning to newest 5 per label will reclaim git storage`,
          fixable: true,
        });

        if (shouldFix("snapshot_ref_bloat")) {
          const byLabel = new Map<string, string[]>();
          for (const ref of refs) {
            const parts = ref.split("/");
            const label = parts.slice(0, -1).join("/");
            if (!byLabel.has(label)) byLabel.set(label, []);
            byLabel.get(label)!.push(ref);
          }
          let pruned = 0;
          for (const [, labelRefs] of byLabel) {
            const sorted = labelRefs.sort();
            for (const old of sorted.slice(0, -5)) {
              try {
                nativeUpdateRef(basePath, old);
                pruned++;
              } catch { /* skip */ }
            }
          }
          if (pruned > 0) {
            fixesApplied.push(`pruned ${pruned} old snapshot ref(s)`);
          }
        }
      }
    }
  } catch {
    // Non-fatal — snapshot ref check failed
  }
}

/**
 * Build STATE.md markdown content from derived state.
 * Local helper used by checkRuntimeHealth for STATE.md drift detection and repair.
 */
function buildStateMarkdownForCheck(state: Awaited<ReturnType<typeof deriveState>>): string {
  const lines: string[] = [];
  lines.push("# GSD State", "");

  const activeMilestone = state.activeMilestone
    ? `${state.activeMilestone.id}: ${state.activeMilestone.title}`
    : "None";
  const activeSlice = state.activeSlice
    ? `${state.activeSlice.id}: ${state.activeSlice.title}`
    : "None";

  lines.push(`**Active Milestone:** ${activeMilestone}`);
  lines.push(`**Active Slice:** ${activeSlice}`);
  lines.push(`**Phase:** ${state.phase}`);
  if (state.requirements) {
    lines.push(`**Requirements Status:** ${state.requirements.active} active · ${state.requirements.validated} validated · ${state.requirements.deferred} deferred · ${state.requirements.outOfScope} out of scope`);
  }
  lines.push("");
  lines.push("## Milestone Registry");

  for (const entry of state.registry) {
    const glyph = entry.status === "complete" ? "\u2705" : entry.status === "active" ? "\uD83D\uDD04" : entry.status === "parked" ? "\u23F8\uFE0F" : "\u2B1C";
    lines.push(`- ${glyph} **${entry.id}:** ${entry.title}`);
  }

  lines.push("");
  lines.push("## Recent Decisions");
  if (state.recentDecisions.length > 0) {
    for (const decision of state.recentDecisions) lines.push(`- ${decision}`);
  } else {
    lines.push("- None recorded");
  }

  lines.push("");
  lines.push("## Blockers");
  if (state.blockers.length > 0) {
    for (const blocker of state.blockers) lines.push(`- ${blocker}`);
  } else {
    lines.push("- None");
  }

  lines.push("");
  lines.push("## Next Action");
  lines.push(state.nextAction || "None");
  lines.push("");

  return lines.join("\n");
}