feat: doctor integration, merge reconciliation, dispatch hardening credit (#672)
Doctor integration: - Add "stale_parallel_session" issue code to /gsd doctor - Detects orphaned parallel sessions (dead PID or expired heartbeat) - Auto-fixable: cleans up stale .gsd/parallel/ status files Merge reconciliation (parallel-merge.ts): - determineMergeOrder: sequential or by-completion ordering - mergeCompletedMilestone: wraps existing mergeMilestoneToMain with parallel-safe error handling and session cleanup - mergeAllCompleted: sequential merge with stop-on-conflict - formatMergeResults: human-readable merge status output Dispatch hardening (PR 2 from plan): Already landed via @deseltrus contributions: - _skipDepth + MAX_SKIP_DEPTH guard (#465) - _dispatching re-entrancy mutex (#465) - inFlightTools tool-aware idle detection (#596) Tests: 54 total (15 new), 976/976 full suite passing. Suggested-by: deseltrus <deseltrus@users.noreply.github.com>
This commit is contained in:
parent
77e14a060b
commit
db1032f580
3 changed files with 384 additions and 0 deletions
|
|
@ -11,6 +11,7 @@ import { RUNTIME_EXCLUSION_PATHS } from "./git-service.js";
|
|||
import { nativeIsRepo, nativeWorktreeRemove, nativeBranchList, nativeBranchDelete, nativeLsFiles, nativeRmCached } from "./native-git-bridge.js";
|
||||
import { readCrashLock, isLockProcessAlive, clearLock } from "./crash-recovery.js";
|
||||
import { ensureGitignore } from "./gitignore.js";
|
||||
import { readAllSessionStatuses, isSessionStale, removeSessionStatus } from "./session-status-io.js";
|
||||
|
||||
export type DoctorSeverity = "info" | "warning" | "error";
|
||||
export type DoctorIssueCode =
|
||||
|
|
@ -37,6 +38,7 @@ export type DoctorIssueCode =
|
|||
| "tracked_runtime_files"
|
||||
| "legacy_slice_branches"
|
||||
| "stale_crash_lock"
|
||||
| "stale_parallel_session"
|
||||
| "orphaned_completed_units"
|
||||
| "stale_hook_state"
|
||||
| "activity_log_bloat"
|
||||
|
|
@ -711,6 +713,31 @@ async function checkRuntimeHealth(
|
|||
// Non-fatal — crash lock check failed
|
||||
}
|
||||
|
||||
// ── Stale parallel sessions ────────────────────────────────────────────
|
||||
try {
|
||||
const parallelStatuses = readAllSessionStatuses(basePath);
|
||||
for (const status of parallelStatuses) {
|
||||
if (isSessionStale(status)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
code: "stale_parallel_session",
|
||||
scope: "project",
|
||||
unitId: status.milestoneId,
|
||||
message: `Stale parallel session for ${status.milestoneId} (PID ${status.pid}, started ${new Date(status.startedAt).toISOString()}, last heartbeat ${new Date(status.lastHeartbeat).toISOString()}) — process is no longer running`,
|
||||
file: `.gsd/parallel/${status.milestoneId}.status.json`,
|
||||
fixable: true,
|
||||
});
|
||||
|
||||
if (shouldFix("stale_parallel_session")) {
|
||||
removeSessionStatus(basePath, status.milestoneId);
|
||||
fixesApplied.push(`cleaned up stale parallel session for ${status.milestoneId}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — parallel session check failed
|
||||
}
|
||||
|
||||
// ── Orphaned completed-units keys ─────────────────────────────────────
|
||||
try {
|
||||
const completedKeysFile = join(root, "completed-units.json");
|
||||
|
|
|
|||
156
src/resources/extensions/gsd/parallel-merge.ts
Normal file
156
src/resources/extensions/gsd/parallel-merge.ts
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
/**
|
||||
* GSD Parallel Merge — Worktree reconciliation for parallel milestones.
|
||||
*
|
||||
* Handles merging completed milestone worktrees back to main branch
|
||||
* with safety checks for parallel execution context.
|
||||
*/
|
||||
|
||||
import { loadFile } from "./files.js";
|
||||
import { resolveMilestoneFile } from "./paths.js";
|
||||
import { mergeMilestoneToMain } from "./auto-worktree.js";
|
||||
import { MergeConflictError } from "./git-service.js";
|
||||
import { removeSessionStatus } from "./session-status-io.js";
|
||||
import type { WorkerInfo } from "./parallel-orchestrator.js";
|
||||
|
||||
// ─── Types ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Outcome of attempting to merge one milestone worktree back to main.
 */
export interface MergeResult {
  /** Milestone whose worktree was merged (e.g. "M001"). */
  milestoneId: string;
  /** True when the merge completed without error. */
  success: boolean;
  /** Commit message produced by the merge (set on success). */
  commitMessage?: string;
  /** Whether the merge was pushed to the remote (set on success). */
  pushed?: boolean;
  /** Human-readable failure description (set on failure). */
  error?: string;
  /** Conflicting file paths, when the failure was a merge conflict. */
  conflictFiles?: string[];
}

/**
 * Ordering strategy for merging completed milestones:
 * "sequential" — by milestone ID; "by-completion" — by worker start time
 * (used as a stand-in for finish order; see determineMergeOrder).
 */
export type MergeOrder = "sequential" | "by-completion";
|
||||
|
||||
// ─── Merge Queue ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Determine safe merge order for completed milestones.
|
||||
* Sequential: merge in milestone ID order (M001 before M002).
|
||||
* By-completion: merge in the order milestones finished.
|
||||
*/
|
||||
export function determineMergeOrder(
|
||||
workers: WorkerInfo[],
|
||||
order: MergeOrder = "sequential",
|
||||
): string[] {
|
||||
const completed = workers.filter(w => w.state === "stopped" && w.completedUnits > 0);
|
||||
if (order === "by-completion") {
|
||||
return completed
|
||||
.sort((a, b) => a.startedAt - b.startedAt) // earliest first
|
||||
.map(w => w.milestoneId);
|
||||
}
|
||||
return completed
|
||||
.sort((a, b) => a.milestoneId.localeCompare(b.milestoneId))
|
||||
.map(w => w.milestoneId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to merge a single milestone's worktree back to main.
|
||||
* Wraps mergeMilestoneToMain with error handling for parallel context.
|
||||
*/
|
||||
export async function mergeCompletedMilestone(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
): Promise<MergeResult> {
|
||||
try {
|
||||
// Load the roadmap content (needed by mergeMilestoneToMain)
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
if (!roadmapPath) {
|
||||
return {
|
||||
milestoneId,
|
||||
success: false,
|
||||
error: `No roadmap found for ${milestoneId}`,
|
||||
};
|
||||
}
|
||||
|
||||
const roadmapContent = await loadFile(roadmapPath);
|
||||
if (!roadmapContent) {
|
||||
return {
|
||||
milestoneId,
|
||||
success: false,
|
||||
error: `Could not read roadmap for ${milestoneId}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Attempt the merge
|
||||
const result = mergeMilestoneToMain(basePath, milestoneId, roadmapContent);
|
||||
|
||||
// Clean up parallel session status
|
||||
removeSessionStatus(basePath, milestoneId);
|
||||
|
||||
return {
|
||||
milestoneId,
|
||||
success: true,
|
||||
commitMessage: result.commitMessage,
|
||||
pushed: result.pushed,
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof MergeConflictError) {
|
||||
return {
|
||||
milestoneId,
|
||||
success: false,
|
||||
error: `Merge conflict: ${err.conflictedFiles.length} conflicting file(s)`,
|
||||
conflictFiles: err.conflictedFiles,
|
||||
};
|
||||
}
|
||||
return {
|
||||
milestoneId,
|
||||
success: false,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge all completed milestones in sequence.
|
||||
* Stops on first conflict and returns results so far.
|
||||
*/
|
||||
export async function mergeAllCompleted(
|
||||
basePath: string,
|
||||
workers: WorkerInfo[],
|
||||
order: MergeOrder = "sequential",
|
||||
): Promise<MergeResult[]> {
|
||||
const mergeOrder = determineMergeOrder(workers, order);
|
||||
const results: MergeResult[] = [];
|
||||
|
||||
for (const mid of mergeOrder) {
|
||||
const result = await mergeCompletedMilestone(basePath, mid);
|
||||
results.push(result);
|
||||
|
||||
// Stop on first conflict — later merges may depend on this one
|
||||
if (!result.success && result.conflictFiles) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format merge results for display.
|
||||
*/
|
||||
export function formatMergeResults(results: MergeResult[]): string {
|
||||
if (results.length === 0) return "No completed milestones to merge.";
|
||||
|
||||
const lines: string[] = ["# Merge Results\n"];
|
||||
|
||||
for (const r of results) {
|
||||
if (r.success) {
|
||||
const pushStatus = r.pushed ? " (pushed)" : "";
|
||||
lines.push(`- **${r.milestoneId}** — merged successfully${pushStatus}`);
|
||||
} else if (r.conflictFiles) {
|
||||
lines.push(`- **${r.milestoneId}** — CONFLICT (${r.conflictFiles.length} file(s)):`);
|
||||
for (const f of r.conflictFiles) {
|
||||
lines.push(` - \`${f}\``);
|
||||
}
|
||||
lines.push(` Resolve conflicts manually and run \`/gsd parallel merge ${r.milestoneId}\` to retry.`);
|
||||
} else {
|
||||
lines.push(`- **${r.milestoneId}** — failed: ${r.error}`);
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
|
@ -44,6 +44,9 @@ import {
|
|||
|
||||
import { validatePreferences, resolveParallelConfig } from "../preferences.js";
|
||||
|
||||
import { determineMergeOrder, formatMergeResults, type MergeResult } from "../parallel-merge.js";
|
||||
import type { WorkerInfo } from "../parallel-orchestrator.js";
|
||||
|
||||
// ─── Test Helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
function makeTmpBase(): string {
|
||||
|
|
@ -453,3 +456,201 @@ describe("preferences: validatePreferences parallel config", () => {
|
|||
assert.ok(result.errors.some(e => e.includes("auto_merge")));
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Test Helpers (parallel-merge) ───────────────────────────────────────────
|
||||
|
||||
function makeWorker(overrides: Partial<WorkerInfo> = {}): WorkerInfo {
|
||||
return {
|
||||
milestoneId: "M001",
|
||||
title: "Test Milestone",
|
||||
pid: process.pid,
|
||||
process: null,
|
||||
worktreePath: "/tmp/test-worktree",
|
||||
startedAt: Date.now() - 60_000,
|
||||
state: "stopped",
|
||||
completedUnits: 5,
|
||||
cost: 2.50,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── parallel-merge: determineMergeOrder ─────────────────────────────────────
|
||||
|
||||
// determineMergeOrder with "sequential": completed milestones come back in
// milestone-ID order regardless of when each worker started.
describe("parallel-merge: determineMergeOrder sequential", () => {
  it("returns milestone IDs sorted alphabetically by default", () => {
    const workers = [
      makeWorker({ milestoneId: "M003", state: "stopped", completedUnits: 1 }),
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 2 }),
      makeWorker({ milestoneId: "M002", state: "stopped", completedUnits: 3 }),
    ];
    const order = determineMergeOrder(workers, "sequential");
    assert.deepEqual(order, ["M001", "M002", "M003"]);
  });

  // Only workers in state "stopped" are merge candidates.
  it("excludes workers that are still running", () => {
    const workers = [
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 5 }),
      makeWorker({ milestoneId: "M002", state: "running", completedUnits: 0 }),
      makeWorker({ milestoneId: "M003", state: "stopped", completedUnits: 2 }),
    ];
    const order = determineMergeOrder(workers, "sequential");
    assert.deepEqual(order, ["M001", "M003"]);
  });

  // A stopped worker with nothing completed has nothing to merge.
  it("excludes workers with zero completedUnits even if stopped", () => {
    const workers = [
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 0 }),
      makeWorker({ milestoneId: "M002", state: "stopped", completedUnits: 3 }),
    ];
    const order = determineMergeOrder(workers, "sequential");
    assert.deepEqual(order, ["M002"]);
  });

  it("returns empty array when no workers are completed", () => {
    const workers = [
      makeWorker({ milestoneId: "M001", state: "running", completedUnits: 0 }),
      makeWorker({ milestoneId: "M002", state: "paused", completedUnits: 0 }),
    ];
    const order = determineMergeOrder(workers);
    assert.deepEqual(order, []);
  });

  it("uses sequential order as the default when no order arg provided", () => {
    const workers = [
      makeWorker({ milestoneId: "M002", state: "stopped", completedUnits: 1 }),
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 1 }),
    ];
    // Call with no second argument — should default to "sequential"
    const order = determineMergeOrder(workers);
    assert.deepEqual(order, ["M001", "M002"]);
  });
});
|
||||
|
||||
// determineMergeOrder with "by-completion": ordering is by startedAt,
// earliest first (start time serves as the proxy for completion order).
describe("parallel-merge: determineMergeOrder by-completion", () => {
  it("returns milestones sorted by startedAt (earliest first)", () => {
    const now = Date.now();
    const workers = [
      makeWorker({ milestoneId: "M003", state: "stopped", completedUnits: 1, startedAt: now - 30_000 }),
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 1, startedAt: now - 90_000 }),
      makeWorker({ milestoneId: "M002", state: "stopped", completedUnits: 1, startedAt: now - 60_000 }),
    ];
    const order = determineMergeOrder(workers, "by-completion");
    assert.deepEqual(order, ["M001", "M002", "M003"]);
  });

  // Eligibility filtering (state === "stopped") applies to both orderings.
  it("excludes paused workers from by-completion order", () => {
    const now = Date.now();
    const workers = [
      makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 2, startedAt: now - 90_000 }),
      makeWorker({ milestoneId: "M002", state: "paused", completedUnits: 1, startedAt: now - 60_000 }),
      makeWorker({ milestoneId: "M003", state: "stopped", completedUnits: 3, startedAt: now - 30_000 }),
    ];
    const order = determineMergeOrder(workers, "by-completion");
    assert.deepEqual(order, ["M001", "M003"]);
  });
});
|
||||
|
||||
// ─── parallel-merge: formatMergeResults ──────────────────────────────────────
|
||||
|
||||
// formatMergeResults rendering: empty input, success with/without push,
// conflict listings with retry instructions, generic failures, and ordering.
describe("parallel-merge: formatMergeResults", () => {
  it("returns a no-op message for an empty results array", () => {
    const output = formatMergeResults([]);
    assert.equal(output, "No completed milestones to merge.");
  });

  it("formats a single successful merge without push", () => {
    const results: MergeResult[] = [
      { milestoneId: "M001", success: true, commitMessage: "feat: auth system", pushed: false },
    ];
    const output = formatMergeResults(results);
    assert.ok(output.includes("# Merge Results"));
    assert.ok(output.includes("**M001**"));
    assert.ok(output.includes("merged successfully"));
    assert.ok(!output.includes("(pushed)"));
  });

  it("includes (pushed) suffix when result.pushed is true", () => {
    const results: MergeResult[] = [
      { milestoneId: "M002", success: true, commitMessage: "feat: dashboard", pushed: true },
    ];
    const output = formatMergeResults(results);
    assert.ok(output.includes("(pushed)"));
  });

  it("formats a conflict result with file list and retry instructions", () => {
    const results: MergeResult[] = [
      {
        milestoneId: "M003",
        success: false,
        conflictFiles: ["src/types.ts", "src/utils.ts"],
        error: "Merge conflict: 2 conflicting file(s)",
      },
    ];
    const output = formatMergeResults(results);
    assert.ok(output.includes("**M003**"));
    assert.ok(output.includes("CONFLICT (2 file(s))"));
    assert.ok(output.includes("`src/types.ts`"));
    assert.ok(output.includes("`src/utils.ts`"));
    assert.ok(output.includes("/gsd parallel merge M003"));
  });

  // A failure with no conflictFiles takes the generic-error branch.
  it("formats a generic error (no conflict files) with the error message", () => {
    const results: MergeResult[] = [
      { milestoneId: "M004", success: false, error: "No roadmap found for M004" },
    ];
    const output = formatMergeResults(results);
    assert.ok(output.includes("**M004**"));
    assert.ok(output.includes("failed: No roadmap found for M004"));
    assert.ok(!output.includes("CONFLICT"));
  });

  // Output order must mirror input order (no re-sorting inside the formatter).
  it("formats multiple results in the order provided", () => {
    const results: MergeResult[] = [
      { milestoneId: "M001", success: true, pushed: false },
      { milestoneId: "M002", success: false, error: "branch not found" },
      { milestoneId: "M003", success: true, pushed: true },
    ];
    const output = formatMergeResults(results);
    const m1Pos = output.indexOf("M001");
    const m2Pos = output.indexOf("M002");
    const m3Pos = output.indexOf("M003");
    assert.ok(m1Pos < m2Pos, "M001 should appear before M002");
    assert.ok(m2Pos < m3Pos, "M002 should appear before M003");
  });
});
|
||||
|
||||
// ─── doctor: stale_parallel_session issue code ───────────────────────────────
|
||||
|
||||
describe("doctor: stale_parallel_session issue code exists", () => {
|
||||
it("DoctorIssueCode union includes stale_parallel_session", async () => {
|
||||
// Import doctor.ts and verify the type is real by constructing a DoctorIssue
|
||||
// with code "stale_parallel_session" — TypeScript will reject it at compile
|
||||
// time if the code is not in the union; the runtime assertion confirms the
|
||||
// string value round-trips through the typed object correctly.
|
||||
const { } = await import("../doctor.js");
|
||||
// Construct a value that satisfies DoctorIssue using the code under test
|
||||
const issue: import("../doctor.js").DoctorIssue = {
|
||||
severity: "warning",
|
||||
code: "stale_parallel_session",
|
||||
scope: "project",
|
||||
unitId: "M001",
|
||||
message: "Stale parallel session detected",
|
||||
fixable: true,
|
||||
};
|
||||
assert.equal(issue.code, "stale_parallel_session");
|
||||
});
|
||||
|
||||
it("DoctorIssue with stale_parallel_session has warning severity", () => {
|
||||
const issue: import("../doctor.js").DoctorIssue = {
|
||||
severity: "warning",
|
||||
code: "stale_parallel_session",
|
||||
scope: "project",
|
||||
unitId: "M002",
|
||||
message: "Stale parallel session for M002",
|
||||
fixable: true,
|
||||
};
|
||||
assert.equal(issue.severity, "warning");
|
||||
assert.equal(issue.fixable, true);
|
||||
assert.equal(issue.scope, "project");
|
||||
});
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue