test: parallel merge reconciliation + budget atomicity (G5/G6) (#933)

* test: parallel merge reconciliation + budget atomicity coverage (G5/G6)

27 new tests covering two gaps identified in #672:

G5 — Merge Reconciliation (parallel-merge.test.ts, 17 tests):
- determineMergeOrder: sequential, by-completion, filtering, defaults
- formatMergeResults: success, conflict, empty, mixed output
- mergeCompletedMilestone: clean merge with session cleanup, missing
  roadmap error, conflict detection with structured file list
- mergeAllCompleted: sequential order, stop-on-first-conflict,
  by-completion order (integration tests with real git repos)

G6 — Budget Atomicity (parallel-budget-atomicity.test.ts, 10 tests):
- Ceiling enforcement: exceeded, not exceeded, exact boundary
- Cost aggregation: correct sum, incremental updates
- No double-counting: 5 rapid refreshes produce correct total
- Budget reset: resetOrchestrator clears all state
- No ceiling: unlimited spending when budget_ceiling unset
- Worker state sync: refreshWorkerStatuses picks up disk changes

All tests use node:test + node:assert/strict. No production code changes.

Relates to #672

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* fix: use double quotes in git commit messages for Windows compatibility

Single-quoted commit messages in test helpers fail on Windows CMD
(pathspec errors). Switch to double quotes which work cross-platform.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
deseltrus 2026-03-17 21:05:16 +01:00 committed by GitHub
parent 1ea653b5fc
commit 9fe805b1d3
2 changed files with 796 additions and 0 deletions

View file

@ -0,0 +1,331 @@
/**
* parallel-budget-atomicity.test.ts Budget enforcement tests for parallel orchestration (G6).
*
* Verifies that the budget ceiling cannot be exceeded through race conditions
* or incorrect cost aggregation. Tests the single-writer architecture:
* workers emit costs via session status files, the coordinator reads them
* sequentially via refreshWorkerStatuses().
*
* Covers:
* - Ceiling enforcement: isBudgetExceeded returns true above ceiling
* - Cost aggregation: sum across all workers is correct
* - No double-counting: multiple refreshes don't accumulate
* - Budget reset: totalCost clears after resetOrchestrator
* - No budget ceiling: isBudgetExceeded returns false when ceiling unset
*/
import test from "node:test";
import assert from "node:assert/strict";
import { mkdirSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { randomUUID } from "node:crypto";
import {
startParallel,
getAggregateCost,
isBudgetExceeded,
refreshWorkerStatuses,
resetOrchestrator,
getOrchestratorState,
isParallelActive,
getWorkerStatuses,
} from "../parallel-orchestrator.ts";
import {
writeSessionStatus,
readSessionStatus,
removeSessionStatus,
} from "../session-status-io.ts";
import type { GSDPreferences } from "../preferences.ts";
// ─── Helpers ──────────────────────────────────────────────────────────────────
/** Create a throwaway project root containing an empty `.gsd/` directory. */
function makeTmpBase(): string {
  const root = join(tmpdir(), `gsd-budget-test-${randomUUID()}`);
  mkdirSync(join(root, ".gsd"), { recursive: true });
  return root;
}
/** Best-effort removal of a temp directory; failures are deliberately ignored. */
function cleanup(base: string): void {
  try {
    rmSync(base, { recursive: true, force: true });
  } catch {
    /* leftover temp dirs are harmless */
  }
}
/**
 * Build a minimal GSDPreferences object with parallel mode enabled.
 *
 * @param ceiling - budget_ceiling in dollars; pass undefined to leave the
 *   ceiling unset (exercises the "no ceiling" path of isBudgetExceeded).
 */
function makePrefs(ceiling?: number): GSDPreferences {
  return {
    parallel: {
      enabled: true,
      max_workers: 2,
      budget_ceiling: ceiling,
      merge_strategy: "per-milestone",
      auto_merge: "confirm",
    },
  };
}
/**
 * Write a session status file for a milestone with a specific cost.
 *
 * Simulates a worker process reporting progress: state "running", a heartbeat
 * of "now", and a startedAt one minute in the past. `cost` is the worker's
 * absolute running total, not a delta — the "worker cost update" test below
 * relies on refreshes replacing (not accumulating) this value.
 *
 * @param completedUnits - number of finished work units to report (default 1).
 */
function writeWorkerCost(
  base: string,
  milestoneId: string,
  cost: number,
  completedUnits = 1,
): void {
  writeSessionStatus(base, {
    milestoneId,
    pid: process.pid,
    state: "running",
    currentUnit: null,
    completedUnits,
    cost,
    lastHeartbeat: Date.now(),
    startedAt: Date.now() - 60000, // pretend the worker has been running for 1 min
    worktreePath: join(base, ".gsd", "worktrees", milestoneId.toLowerCase()),
  });
}
// ═══════════════════════════════════════════════════════════════════════════════
// Ceiling Enforcement
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — isBudgetExceeded returns true when totalCost >= ceiling", async () => {
  const projectDir = makeTmpBase();
  try {
    await startParallel(projectDir, ["M001", "M002"], makePrefs(1.0));
    // Before any worker reports cost, the aggregate is zero and under budget.
    assert.equal(getAggregateCost(), 0);
    assert.equal(isBudgetExceeded(), false);
    // Two workers together report $1.10 against a $1.00 ceiling.
    writeWorkerCost(projectDir, "M002", 0.5);
    writeWorkerCost(projectDir, "M001", 0.6);
    refreshWorkerStatuses(projectDir);
    assert.ok(getAggregateCost() >= 1.0, `aggregate cost should be >= 1.0, got ${getAggregateCost()}`);
    assert.equal(isBudgetExceeded(), true, "should be exceeded at 1.1 vs ceiling 1.0");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
test("budget — isBudgetExceeded returns false when totalCost < ceiling", async () => {
  const projectDir = makeTmpBase();
  try {
    // Ceiling of $5.00 sits comfortably above the $2.50 the workers report.
    await startParallel(projectDir, ["M001", "M002"], makePrefs(5.0));
    writeWorkerCost(projectDir, "M001", 1.0);
    writeWorkerCost(projectDir, "M002", 1.5);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 2.5);
    assert.equal(isBudgetExceeded(), false, "should not be exceeded at 2.5 vs ceiling 5.0");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
test("budget — isBudgetExceeded returns true at exact ceiling", async () => {
  const projectDir = makeTmpBase();
  try {
    // Boundary case: totalCost equal to the ceiling counts as exceeded
    // (>= semantics, per the test name above).
    await startParallel(projectDir, ["M001"], makePrefs(2.0));
    writeWorkerCost(projectDir, "M001", 2.0);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 2.0);
    assert.equal(isBudgetExceeded(), true, "should be exceeded at exact ceiling");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// Cost Aggregation
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — cost aggregation sums all worker costs correctly", async () => {
  const projectDir = makeTmpBase();
  try {
    await startParallel(projectDir, ["M001", "M002"], makePrefs(100.0));
    writeWorkerCost(projectDir, "M001", 3.14159);
    writeWorkerCost(projectDir, "M002", 2.71828);
    refreshWorkerStatuses(projectDir);
    // Compare with a tolerance — float addition order inside the orchestrator
    // is an implementation detail.
    const expected = 3.14159 + 2.71828;
    const actual = getAggregateCost();
    assert.ok(
      Math.abs(actual - expected) < 0.0001,
      `cost should be ~${expected}, got ${actual}`,
    );
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
test("budget — worker cost update reflects in aggregate after refresh", async () => {
  const projectDir = makeTmpBase();
  try {
    await startParallel(projectDir, ["M001"], makePrefs(10.0));
    // Each status write carries the worker's absolute running total, so each
    // refresh should replace — not add to — the previous reading.
    writeWorkerCost(projectDir, "M001", 0.5);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 0.5);
    writeWorkerCost(projectDir, "M001", 1.5);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 1.5, "should reflect updated cost, not accumulated");
    writeWorkerCost(projectDir, "M001", 3.0);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 3.0);
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// No Double-Counting
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — multiple refreshes don't accumulate cost", async () => {
  const base = makeTmpBase();
  try {
    await startParallel(base, ["M001", "M002"], makePrefs(10.0));
    writeWorkerCost(base, "M001", 0.5);
    writeWorkerCost(base, "M002", 0.3);
    // Refresh repeatedly: the coordinator re-reads absolute per-worker costs
    // from disk, so extra refreshes must not inflate the aggregate.
    for (let i = 0; i < 5; i++) {
      refreshWorkerStatuses(base);
    }
    // Tolerance comparison instead of assert.equal: 0.5 + 0.3 is a float sum
    // whose exact bit pattern depends on the orchestrator's summation order
    // (same pattern the aggregation test above already uses).
    assert.ok(
      Math.abs(getAggregateCost() - 0.8) < 1e-9,
      "cost should be 0.8 after 5 refreshes",
    );
  } finally {
    resetOrchestrator();
    cleanup(base);
  }
});
test("budget — refresh between cost updates tracks correctly", async () => {
  const base = makeTmpBase();
  try {
    await startParallel(base, ["M001", "M002"], makePrefs(10.0));
    // Round 1: only M001 has reported cost.
    writeWorkerCost(base, "M001", 0.5);
    refreshWorkerStatuses(base);
    const cost1 = getAggregateCost();
    // Round 2: M002 comes online with its own cost.
    writeWorkerCost(base, "M002", 0.7);
    refreshWorkerStatuses(base);
    const cost2 = getAggregateCost();
    // Round 3: M001's absolute cost increases from 0.5 to 1.2.
    writeWorkerCost(base, "M001", 1.2);
    refreshWorkerStatuses(base);
    const cost3 = getAggregateCost();
    // Tolerance comparisons: 0.5 + 0.7 and 1.2 + 0.7 are float sums whose
    // exact representation depends on the orchestrator's addition order, so
    // strict equality would make this test fragile.
    assert.ok(Math.abs(cost1 - 0.5) < 1e-9, "round 1: only M001");
    assert.ok(Math.abs(cost2 - 1.2) < 1e-9, "round 2: M001 + M002");
    assert.ok(Math.abs(cost3 - 1.9) < 1e-9, "round 3: updated M001 + M002");
  } finally {
    resetOrchestrator();
    cleanup(base);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// Budget Reset
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — resetOrchestrator clears totalCost", async () => {
  const projectDir = makeTmpBase();
  try {
    await startParallel(projectDir, ["M001"], makePrefs(10.0));
    writeWorkerCost(projectDir, "M001", 5.0);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 5.0, "cost should be 5.0 before reset");
    // A reset must wipe every piece of orchestrator state, not just the cost.
    resetOrchestrator();
    assert.equal(getAggregateCost(), 0, "cost should be 0 after reset");
    assert.equal(isBudgetExceeded(), false, "should not be exceeded after reset");
    assert.equal(isParallelActive(), false, "should not be active after reset");
    assert.equal(getOrchestratorState(), null, "state should be null after reset");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// No Budget Ceiling
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — isBudgetExceeded returns false when no ceiling configured", async () => {
  const projectDir = makeTmpBase();
  try {
    // budget_ceiling left undefined: spending is tracked but never capped.
    await startParallel(projectDir, ["M001"], makePrefs(undefined));
    writeWorkerCost(projectDir, "M001", 999.99);
    refreshWorkerStatuses(projectDir);
    assert.equal(getAggregateCost(), 999.99, "cost should be tracked even without ceiling");
    assert.equal(isBudgetExceeded(), false, "should never be exceeded without ceiling");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// Worker status tracking through refresh
// ═══════════════════════════════════════════════════════════════════════════════
test("budget — refreshWorkerStatuses updates worker state from disk", async () => {
  const projectDir = makeTmpBase();
  try {
    await startParallel(projectDir, ["M001"], makePrefs(10.0));
    // Simulate the worker writing a richer status than the "running" default
    // produced by the writeWorkerCost helper.
    writeSessionStatus(projectDir, {
      milestoneId: "M001",
      pid: process.pid,
      state: "paused",
      currentUnit: { type: "execute-task", id: "M001/S01/T02", startedAt: Date.now() },
      completedUnits: 5,
      cost: 2.5,
      lastHeartbeat: Date.now(),
      startedAt: Date.now() - 120000,
      worktreePath: join(projectDir, ".gsd", "worktrees", "m001"),
    });
    refreshWorkerStatuses(projectDir);
    const workers = getWorkerStatuses();
    assert.equal(workers.length, 1);
    const worker = workers[0]!;
    assert.equal(worker.state, "paused", "worker state should be updated from disk");
    assert.equal(worker.completedUnits, 5, "completedUnits should be updated from disk");
    assert.equal(worker.cost, 2.5, "cost should be updated from disk");
  } finally {
    resetOrchestrator();
    cleanup(projectDir);
  }
});

View file

@ -0,0 +1,465 @@
/**
* parallel-merge.test.ts Tests for parallel merge reconciliation (G5).
*
* Covers:
* - determineMergeOrder: sequential vs by-completion ordering, filtering
* - formatMergeResults: success, conflict, empty, mixed output formatting
* - mergeCompletedMilestone: clean merge with session cleanup, missing roadmap,
* conflict detection with structured error
* - mergeAllCompleted: stop-on-first-conflict, sequential execution order
*
* Pure-function tests need no git. Integration tests use temp repos with real
* git operations (same pattern as auto-worktree-milestone-merge.test.ts).
*/
import test from "node:test";
import assert from "node:assert/strict";
import {
mkdtempSync,
mkdirSync,
writeFileSync,
rmSync,
existsSync,
realpathSync,
} from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { execSync } from "node:child_process";
import {
determineMergeOrder,
mergeCompletedMilestone,
mergeAllCompleted,
formatMergeResults,
type MergeResult,
} from "../parallel-merge.ts";
import type { WorkerInfo } from "../parallel-orchestrator.ts";
import {
writeSessionStatus,
readSessionStatus,
} from "../session-status-io.ts";
// ─── Helpers ──────────────────────────────────────────────────────────────────
/** Run a shell command in `cwd` and return its trimmed stdout; throws on non-zero exit. */
function run(cmd: string, cwd: string): string {
  const stdout = execSync(cmd, {
    cwd,
    stdio: ["ignore", "pipe", "pipe"],
    encoding: "utf-8",
  });
  return stdout.trim();
}
/** Initialise a real git repo on branch `main` in a fresh temp dir, with one commit. */
function createTempRepo(): string {
  const repo = realpathSync(mkdtempSync(join(tmpdir(), "parallel-merge-test-")));
  // Identity config is required so commits work in a bare CI environment.
  for (const cmd of [
    "git init -b main",
    "git config user.email test@test.com",
    "git config user.name Test",
  ]) {
    run(cmd, repo);
  }
  writeFileSync(join(repo, "README.md"), "# test\n");
  mkdirSync(join(repo, ".gsd"), { recursive: true });
  writeFileSync(join(repo, ".gsd", "STATE.md"), "# State\n");
  run("git add .", repo);
  run("git commit -m init", repo);
  return repo;
}
/** Build a WorkerInfo with sensible defaults, overridable per test. */
function makeWorker(overrides: Partial<WorkerInfo> = {}): WorkerInfo {
  const defaults: WorkerInfo = {
    milestoneId: "M001",
    title: "Test milestone",
    pid: process.pid,
    process: null,
    worktreePath: "/tmp/test",
    startedAt: Date.now(),
    state: "stopped",
    completedUnits: 3,
    cost: 1.5,
  };
  return { ...defaults, ...overrides };
}
/** Best-effort removal of a temp repo; failures are deliberately swallowed. */
function cleanup(dir: string): void {
  try {
    rmSync(dir, { recursive: true, force: true });
  } catch {
    /* leftover temp dirs are harmless */
  }
}
/** Set up a milestone roadmap file in .gsd/milestones/<MID>/ */
function setupRoadmap(repo: string, mid: string, title: string, slices: string[]): void {
  const milestoneDir = join(repo, ".gsd", "milestones", mid);
  mkdirSync(milestoneDir, { recursive: true });
  // Each slice is rendered as a checked markdown task line.
  const body = [
    `# ${mid}: ${title}`,
    "",
    "## Slices",
    slices.map(s => `- [x] **${s}**`).join("\n"),
    "",
  ].join("\n");
  writeFileSync(join(milestoneDir, `${mid}-ROADMAP.md`), body);
}
/** Create a milestone branch with file changes, then return to main. */
function createMilestoneBranch(
  repo: string,
  mid: string,
  files: Array<{ name: string; content: string }>,
): void {
  run(`git checkout -b milestone/${mid}`, repo);
  for (const { name, content } of files) {
    // Create intermediate directories for nested paths like "src/app.ts";
    // for a bare filename the slice is empty and parent === repo.
    const parent = join(repo, ...name.split("/").slice(0, -1));
    if (parent !== repo) mkdirSync(parent, { recursive: true });
    writeFileSync(join(repo, name), content);
  }
  run("git add .", repo);
  run(`git commit -m "feat(${mid}): add files"`, repo);
  run("git checkout main", repo);
}
// ═══════════════════════════════════════════════════════════════════════════════
// determineMergeOrder — Pure function tests
// ═══════════════════════════════════════════════════════════════════════════════
test("determineMergeOrder — sequential sorts by milestone ID", () => {
  // startedAt deliberately disagrees with ID order to prove it is ignored here.
  const pool = [
    makeWorker({ milestoneId: "M003", startedAt: 100 }),
    makeWorker({ milestoneId: "M001", startedAt: 300 }),
    makeWorker({ milestoneId: "M002", startedAt: 200 }),
  ];
  assert.deepEqual(determineMergeOrder(pool, "sequential"), ["M001", "M002", "M003"]);
});
test("determineMergeOrder — by-completion sorts by startedAt (earliest first)", () => {
  // Same pool as the sequential test; here startedAt should dominate instead.
  const pool = [
    makeWorker({ milestoneId: "M003", startedAt: 100 }),
    makeWorker({ milestoneId: "M001", startedAt: 300 }),
    makeWorker({ milestoneId: "M002", startedAt: 200 }),
  ];
  assert.deepEqual(determineMergeOrder(pool, "by-completion"), ["M003", "M002", "M001"]);
});
test("determineMergeOrder — only includes stopped workers with completedUnits > 0", () => {
  // Only M001 qualifies: the others are still running, made no progress,
  // errored out, or are paused.
  const pool = [
    makeWorker({ milestoneId: "M001", state: "stopped", completedUnits: 3 }),
    makeWorker({ milestoneId: "M002", state: "running", completedUnits: 2 }),
    makeWorker({ milestoneId: "M003", state: "stopped", completedUnits: 0 }),
    makeWorker({ milestoneId: "M004", state: "error", completedUnits: 5 }),
    makeWorker({ milestoneId: "M005", state: "paused", completedUnits: 1 }),
  ];
  assert.deepEqual(determineMergeOrder(pool, "sequential"), ["M001"]);
});
test("determineMergeOrder — empty workers returns empty array", () => {
  // Both ordering modes must tolerate an empty pool.
  for (const order of ["sequential", "by-completion"] as const) {
    assert.deepEqual(determineMergeOrder([], order), []);
  }
});
test("determineMergeOrder — defaults to sequential when order not specified", () => {
  const pool = [
    makeWorker({ milestoneId: "M002" }),
    makeWorker({ milestoneId: "M001" }),
  ];
  // No order argument: should behave like "sequential" (sorted by ID).
  assert.deepEqual(determineMergeOrder(pool), ["M001", "M002"]);
});
// ═══════════════════════════════════════════════════════════════════════════════
// formatMergeResults — Pure function tests
// ═══════════════════════════════════════════════════════════════════════════════
test("formatMergeResults — empty results", () => {
  const text = formatMergeResults([]);
  assert.ok(text.includes("No completed milestones"));
});
test("formatMergeResults — successful merge", () => {
  const merged: MergeResult[] = [
    { milestoneId: "M001", success: true, commitMessage: "feat(M001): Auth", pushed: true },
  ];
  const text = formatMergeResults(merged);
  // A pushed success must name the milestone, the outcome, and the push marker.
  for (const needle of ["M001", "merged successfully", "(pushed)"]) {
    assert.ok(text.includes(needle));
  }
});
test("formatMergeResults — successful merge without push", () => {
  const merged: MergeResult[] = [
    { milestoneId: "M001", success: true, commitMessage: "feat(M001): Auth", pushed: false },
  ];
  const text = formatMergeResults(merged);
  assert.ok(text.includes("merged successfully"));
  // pushed:false must not claim a push happened.
  assert.ok(!text.includes("(pushed)"));
});
test("formatMergeResults — conflict with file list", () => {
  const conflicted: MergeResult[] = [
    {
      milestoneId: "M002",
      success: false,
      error: "Merge conflict: 2 conflicting file(s)",
      conflictFiles: ["src/app.ts", "src/main.ts"],
    },
  ];
  const text = formatMergeResults(conflicted);
  // Output must flag the conflict, name every file, and tell the user what to do.
  for (const needle of ["CONFLICT", "src/app.ts", "src/main.ts", "Resolve conflicts manually"]) {
    assert.ok(text.includes(needle));
  }
});
test("formatMergeResults — generic failure without conflict files", () => {
  const failed: MergeResult[] = [
    { milestoneId: "M003", success: false, error: "No roadmap found for M003" },
  ];
  const text = formatMergeResults(failed);
  // Non-conflict failures still surface the milestone and the error text.
  for (const needle of ["M003", "failed", "No roadmap found"]) {
    assert.ok(text.includes(needle));
  }
});
test("formatMergeResults — mixed results", () => {
  const mixed: MergeResult[] = [
    { milestoneId: "M001", success: true, commitMessage: "feat(M001): OK", pushed: false },
    { milestoneId: "M002", success: false, error: "conflict", conflictFiles: ["a.ts"] },
  ];
  const text = formatMergeResults(mixed);
  // One success and one conflict must both be reported in a single output.
  for (const needle of ["M001", "merged successfully", "M002", "CONFLICT"]) {
    assert.ok(text.includes(needle));
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// mergeCompletedMilestone — Integration tests (real git)
// ═══════════════════════════════════════════════════════════════════════════════
test("mergeCompletedMilestone — missing roadmap returns error result", async () => {
  // mkdtempSync guarantees a unique directory even when tests run in parallel;
  // the previous Date.now() suffix could collide within the same millisecond.
  const base = mkdtempSync(join(tmpdir(), "parallel-merge-noroadmap-"));
  mkdirSync(join(base, ".gsd"), { recursive: true });
  try {
    const result = await mergeCompletedMilestone(base, "M999");
    assert.equal(result.success, false);
    assert.ok(result.error?.includes("No roadmap found") || result.error?.includes("Could not read"));
    assert.equal(result.milestoneId, "M999");
  } finally {
    cleanup(base);
  }
});
test("mergeCompletedMilestone — clean merge, session status cleaned up", async () => {
  // The merge runs with cwd inside the repo (process.chdir below), so record
  // the original cwd to restore it in the finally block.
  const savedCwd = process.cwd();
  const repo = createTempRepo();
  try {
    // Create milestone branch with a new file
    createMilestoneBranch(repo, "M010", [
      { name: "auth.ts", content: "export const auth = true;\n" },
    ]);
    // Set up roadmap
    setupRoadmap(repo, "M010", "Auth System", ["S01: JWT module"]);
    // Write session status to verify cleanup
    writeSessionStatus(repo, {
      milestoneId: "M010",
      pid: process.pid,
      state: "stopped",
      currentUnit: null,
      completedUnits: 3,
      cost: 1.5,
      lastHeartbeat: Date.now(),
      startedAt: Date.now() - 60000,
      worktreePath: join(repo, ".gsd", "worktrees", "M010"),
    });
    // Verify session status exists before merge
    const statusBefore = readSessionStatus(repo, "M010");
    assert.ok(statusBefore, "session status should exist before merge");
    // Merge from project root
    process.chdir(repo);
    const result = await mergeCompletedMilestone(repo, "M010");
    assert.equal(result.success, true, `merge should succeed: ${result.error}`);
    assert.ok(result.commitMessage, "should have commit message");
    assert.equal(result.milestoneId, "M010");
    // Verify file merged to main
    assert.ok(existsSync(join(repo, "auth.ts")), "auth.ts should be on main");
    // Verify commit on main
    const log = run("git log --oneline main", repo);
    assert.ok(log.includes("M010"), "commit message should reference M010");
    // Verify session status cleaned up — a successful merge is expected to
    // remove the worker's session status file as part of reconciliation.
    const statusAfter = readSessionStatus(repo, "M010");
    assert.equal(statusAfter, null, "session status should be cleaned up after merge");
    // Verify milestone branch deleted
    const branches = run("git branch", repo);
    assert.ok(!branches.includes("milestone/M010"), "milestone branch should be deleted");
  } finally {
    process.chdir(savedCwd);
    cleanup(repo);
  }
});
test("mergeCompletedMilestone — conflict returns structured error with file list", async () => {
  const savedCwd = process.cwd();
  const repo = createTempRepo();
  try {
    // Create milestone branch that modifies README.md
    run("git checkout -b milestone/M020", repo);
    writeFileSync(join(repo, "README.md"), "# M020 version\n");
    run("git add .", repo);
    run('git commit -m "M020 changes README"', repo);
    run("git checkout main", repo);
    // Modify README.md on main to create conflict — both branches now have
    // divergent commits touching the same file.
    writeFileSync(join(repo, "README.md"), "# main version (diverged)\n");
    run("git add .", repo);
    run('git commit -m "main changes README"', repo);
    // Set up roadmap
    setupRoadmap(repo, "M020", "Conflict Test", ["S01: Conflict scenario"]);
    process.chdir(repo);
    const result = await mergeCompletedMilestone(repo, "M020");
    // The failure must be structured: milestone ID, the conflicting paths,
    // and an error message mentioning the conflict.
    assert.equal(result.success, false, "merge should fail with conflict");
    assert.equal(result.milestoneId, "M020");
    assert.ok(result.conflictFiles, "should have conflictFiles");
    assert.ok(result.conflictFiles!.length > 0, "should have at least one conflict file");
    assert.ok(result.conflictFiles!.includes("README.md"), "README.md should be in conflicts");
    assert.ok(result.error?.includes("conflict"), "error message should mention conflict");
  } finally {
    process.chdir(savedCwd);
    // Reset git state before cleanup (repo may be in conflicted state)
    try { run("git reset --hard HEAD", repo); } catch { /* */ }
    cleanup(repo);
  }
});
// ═══════════════════════════════════════════════════════════════════════════════
// mergeAllCompleted — Integration tests
// ═══════════════════════════════════════════════════════════════════════════════
test("mergeAllCompleted — merges in sequential order", async () => {
  const savedCwd = process.cwd();
  const repo = createTempRepo();
  try {
    // M001: adds auth.ts
    createMilestoneBranch(repo, "M001", [
      { name: "auth.ts", content: "export const auth = true;\n" },
    ]);
    // M002: adds dashboard.ts
    createMilestoneBranch(repo, "M002", [
      { name: "dashboard.ts", content: "export const dash = true;\n" },
    ]);
    setupRoadmap(repo, "M001", "Auth", ["S01: Auth module"]);
    setupRoadmap(repo, "M002", "Dashboard", ["S01: Dashboard module"]);
    // Workers listed out of ID order on purpose: sequential mode must re-sort
    // by milestone ID regardless of array order or startedAt.
    const workers = [
      makeWorker({ milestoneId: "M002", startedAt: 100 }),
      makeWorker({ milestoneId: "M001", startedAt: 200 }),
    ];
    process.chdir(repo);
    const results = await mergeAllCompleted(repo, workers, "sequential");
    // Both should succeed
    assert.equal(results.length, 2, "should have two results");
    assert.equal(results[0]!.milestoneId, "M001", "M001 merged first (sequential)");
    assert.equal(results[0]!.success, true, "M001 should succeed");
    assert.equal(results[1]!.milestoneId, "M002", "M002 merged second");
    assert.equal(results[1]!.success, true, "M002 should succeed");
    // Both files on main
    assert.ok(existsSync(join(repo, "auth.ts")), "auth.ts on main");
    assert.ok(existsSync(join(repo, "dashboard.ts")), "dashboard.ts on main");
  } finally {
    process.chdir(savedCwd);
    cleanup(repo);
  }
});
test("mergeAllCompleted — stops on first conflict, skips later milestones", async () => {
  const savedCwd = process.cwd();
  const repo = createTempRepo();
  try {
    // M001: modifies README.md (will conflict with main)
    run("git checkout -b milestone/M001", repo);
    writeFileSync(join(repo, "README.md"), "# M001 version\n");
    run("git add .", repo);
    run('git commit -m "M001 changes README"', repo);
    run("git checkout main", repo);
    // M002: adds a new file (would NOT conflict) — included to prove the
    // queue stops rather than skipping past a conflicting milestone.
    createMilestoneBranch(repo, "M002", [
      { name: "feature.ts", content: "export const feature = true;\n" },
    ]);
    // Modify README.md on main to create conflict with M001
    writeFileSync(join(repo, "README.md"), "# main diverged version\n");
    run("git add .", repo);
    run('git commit -m "main diverges README"', repo);
    setupRoadmap(repo, "M001", "Conflict milestone", ["S01: Conflict test"]);
    setupRoadmap(repo, "M002", "Clean milestone", ["S01: Clean test"]);
    const workers = [
      makeWorker({ milestoneId: "M001" }),
      makeWorker({ milestoneId: "M002" }),
    ];
    process.chdir(repo);
    const results = await mergeAllCompleted(repo, workers, "sequential");
    // Only M001 attempted (conflict stops the queue)
    assert.equal(results.length, 1, "should only have one result — stopped after conflict");
    assert.equal(results[0]!.milestoneId, "M001");
    assert.equal(results[0]!.success, false, "M001 should fail");
    assert.ok(results[0]!.conflictFiles && results[0]!.conflictFiles.length > 0, "should have conflict files");
    // M002 was NOT attempted
    assert.ok(!results.some(r => r.milestoneId === "M002"), "M002 should not be attempted");
    // feature.ts should NOT be on main (M002 never merged)
    assert.ok(!existsSync(join(repo, "feature.ts")), "feature.ts should not be on main");
  } finally {
    process.chdir(savedCwd);
    // Reset git state before cleanup (repo may be mid-conflict after the failed merge)
    try { run("git reset --hard HEAD", repo); } catch { /* */ }
    cleanup(repo);
  }
});
test("mergeAllCompleted — by-completion order respects startedAt", async () => {
  const savedCwd = process.cwd();
  const repo = createTempRepo();
  try {
    // M001: adds auth.ts (started later)
    createMilestoneBranch(repo, "M001", [
      { name: "auth.ts", content: "export const auth = true;\n" },
    ]);
    // M002: adds feature.ts (started earlier)
    createMilestoneBranch(repo, "M002", [
      { name: "feature.ts", content: "export const feature = true;\n" },
    ]);
    setupRoadmap(repo, "M001", "Auth", ["S01: Auth module"]);
    setupRoadmap(repo, "M002", "Feature", ["S01: Feature module"]);
    // startedAt is inverted relative to ID order so the two strategies would
    // produce different sequences — only by-completion yields M002 first.
    const workers = [
      makeWorker({ milestoneId: "M001", startedAt: 2000 }),
      makeWorker({ milestoneId: "M002", startedAt: 1000 }),
    ];
    process.chdir(repo);
    const results = await mergeAllCompleted(repo, workers, "by-completion");
    // M002 should be merged first (earlier startedAt)
    assert.equal(results.length, 2);
    assert.equal(results[0]!.milestoneId, "M002", "M002 merged first (earlier startedAt)");
    assert.equal(results[1]!.milestoneId, "M001", "M001 merged second");
  } finally {
    process.chdir(savedCwd);
    cleanup(repo);
  }
});