fix: harden auto-mode against stale integration metadata and Windows file locks (#1633)
Fixes #1575
This commit is contained in:
parent
55d6c7d9f1
commit
6277440581
9 changed files with 665 additions and 37 deletions
|
|
@ -120,6 +120,37 @@ rm -rf "$(dirname .gsd)/.gsd.lock"
|
|||
|
||||
**Fix:** GSD auto-resolves conflicts on `.gsd/` runtime files. For content conflicts in code files, the LLM is given an opportunity to resolve them via a fix-merge session. If that fails, manual resolution is needed.
|
||||
|
||||
### Pre-dispatch says the milestone integration branch no longer exists
|
||||
|
||||
**Symptoms:** Auto mode or `/gsd doctor` reports that a milestone recorded an integration branch that no longer exists in git.
|
||||
|
||||
**What it means:** The milestone's `.gsd/milestones/<MID>/<MID>-META.json` still points at the branch that was active when the milestone started, but that branch has since been renamed or deleted.
|
||||
|
||||
**Current behavior:**
|
||||
- If GSD can deterministically recover to a safe branch, it no longer hard-stops auto mode.
|
||||
- The safe fallbacks, tried in order, are:
|
||||
- explicit `git.main_branch` when configured and present
|
||||
- the repo's detected default integration branch (for example `main` or `master`)
|
||||
- In that case `/gsd doctor` reports a warning and `/gsd doctor fix` rewrites the stale metadata to the effective branch.
|
||||
- GSD still blocks when no safe fallback branch can be determined.
|
||||
|
||||
**Fix:**
|
||||
- Run `/gsd doctor fix` to rewrite the stale milestone metadata automatically when the fallback is obvious.
|
||||
- If GSD still blocks, recreate the missing branch or update your git preferences so `git.main_branch` points at a real branch.
|
||||
|
||||
### Transient `EBUSY` / `EPERM` / `EACCES` while writing `.gsd/` files
|
||||
|
||||
**Symptoms:** On Windows, auto mode or doctor occasionally fails while updating `.gsd/` files with errors like `EBUSY`, `EPERM`, or `EACCES`.
|
||||
|
||||
**Cause:** Antivirus, indexers, editors, or filesystem watchers can briefly lock the destination or temp file just as GSD performs the atomic rename.
|
||||
|
||||
**Current behavior:** GSD now retries those transient rename failures with a short bounded backoff before surfacing an error. The retry is intentionally limited so genuine filesystem problems still fail loudly instead of hanging forever.
|
||||
|
||||
**Fix:**
|
||||
- Re-run the operation; most transient lock races clear quickly.
|
||||
- If the error persists, close tools that may be holding the file open and then retry.
|
||||
- If repeated failures continue, run `/gsd doctor` to confirm the repo state is still healthy and report the exact path + error code.
|
||||
|
||||
## MCP Client Issues
|
||||
|
||||
### `mcp_servers` shows no configured servers
|
||||
|
|
|
|||
|
|
@ -1,21 +1,179 @@
|
|||
import { writeFileSync, renameSync, unlinkSync, mkdirSync, promises as fs } from "node:fs"
|
||||
import { dirname } from "node:path"
|
||||
import { randomBytes } from "node:crypto"
|
||||
import { writeFileSync, renameSync, unlinkSync, mkdirSync, promises as fs } from "node:fs";
|
||||
import { dirname } from "node:path";
|
||||
import { randomBytes } from "node:crypto";
|
||||
|
||||
// Errno codes that indicate a transient file lock (antivirus, indexers,
// editors, watchers briefly holding the file) rather than a permanent
// filesystem failure. Only these codes are retried.
const TRANSIENT_LOCK_ERROR_CODES = new Set(["EBUSY", "EPERM", "EACCES"]);
// Hard cap on rename attempts so genuine filesystem problems still fail
// loudly instead of hanging forever.
const MAX_RENAME_ATTEMPTS = 5;
// Backing storage for the synchronous sleep below; Atomics.wait requires an
// Int32Array view over a SharedArrayBuffer.
const SYNC_SLEEP_BUFFER = new SharedArrayBuffer(4);
const SYNC_SLEEP_VIEW = new Int32Array(SYNC_SLEEP_BUFFER);

// Alias for readability in the injectable ops interfaces below.
type RetryableEncoding = BufferEncoding;
// mkdir is always invoked recursively; the option type encodes that invariant.
type MkdirOptions = { recursive: true };
|
||||
|
||||
/**
 * Injectable async filesystem operations used by {@link atomicWriteAsyncWithOps}.
 * Tests substitute fakes here to simulate transient rename failures without
 * touching the real filesystem.
 */
export interface AtomicWriteAsyncOps {
  mkdir(path: string, options: MkdirOptions): Promise<void>;
  writeFile(path: string, content: string, encoding: RetryableEncoding): Promise<void>;
  rename(from: string, to: string): Promise<void>;
  unlink(path: string): Promise<void>;
  /** Delay between retry attempts; overridable so tests run instantly. */
  sleep(ms: number): Promise<void>;
  /** Optional override producing deterministic temp-file names in tests. */
  createTempPath?(filePath: string): string;
}
|
||||
|
||||
/**
 * Injectable synchronous filesystem operations used by
 * {@link atomicWriteSyncWithOps}. Mirrors {@link AtomicWriteAsyncOps} but with
 * blocking signatures.
 */
export interface AtomicWriteSyncOps {
  mkdir(path: string, options: MkdirOptions): void;
  writeFile(path: string, content: string, encoding: RetryableEncoding): void;
  rename(from: string, to: string): void;
  unlink(path: string): void;
  /** Blocking delay between retry attempts. */
  sleep(ms: number): void;
  /** Optional override producing deterministic temp-file names in tests. */
  createTempPath?(filePath: string): string;
}
|
||||
|
||||
function defaultTempPath(filePath: string): string {
|
||||
return filePath + `.tmp.${randomBytes(4).toString("hex")}`;
|
||||
}
|
||||
|
||||
function computeRetryDelayMs(attempt: number): number {
|
||||
const base = 8 * attempt;
|
||||
const jitter = randomBytes(1)[0] % 5;
|
||||
return base + jitter;
|
||||
}
|
||||
|
||||
function delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/**
 * Blocking sleep for the synchronous write path.
 * Atomics.wait on a slot whose value never changes (it stays 0, matching the
 * expected value) simply times out after `ms`, giving a busy-free synchronous
 * delay without spinning. (Node allows Atomics.wait on the main thread.)
 */
function sleepSync(ms: number): void {
  Atomics.wait(SYNC_SLEEP_VIEW, 0, 0, ms);
}
|
||||
|
||||
function normalizeErrnoCode(error: unknown): string | undefined {
|
||||
if (error && typeof error === "object" && "code" in error) {
|
||||
const code = (error as { code?: unknown }).code;
|
||||
return typeof code === "string" ? code : undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function isTransientLockError(error: unknown): boolean {
|
||||
const code = normalizeErrnoCode(error);
|
||||
return typeof code === "string" && TRANSIENT_LOCK_ERROR_CODES.has(code);
|
||||
}
|
||||
|
||||
function buildAtomicWriteError(filePath: string, attempts: number, error: unknown): Error {
|
||||
const code = normalizeErrnoCode(error) ?? "UNKNOWN";
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
const wrapped = new Error(
|
||||
`Atomic write to ${filePath} failed after ${attempts} attempts (last error code: ${code}): ${message}`,
|
||||
) as NodeJS.ErrnoException;
|
||||
wrapped.code = code;
|
||||
if (error instanceof Error && "stack" in error && error.stack) {
|
||||
wrapped.stack = error.stack;
|
||||
}
|
||||
return wrapped;
|
||||
}
|
||||
|
||||
async function cleanupTempFileAsync(tmpPath: string, ops: AtomicWriteAsyncOps): Promise<void> {
|
||||
try {
|
||||
await ops.unlink(tmpPath);
|
||||
} catch {
|
||||
// Best-effort cleanup only.
|
||||
}
|
||||
}
|
||||
|
||||
function cleanupTempFileSync(tmpPath: string, ops: AtomicWriteSyncOps): void {
|
||||
try {
|
||||
ops.unlink(tmpPath);
|
||||
} catch {
|
||||
// Best-effort cleanup only.
|
||||
}
|
||||
}
|
||||
|
||||
/** @internal Exported for retry/cleanup tests. */
|
||||
export async function atomicWriteAsyncWithOps(
|
||||
filePath: string,
|
||||
content: string,
|
||||
encoding: RetryableEncoding = "utf-8",
|
||||
ops: AtomicWriteAsyncOps,
|
||||
): Promise<void> {
|
||||
await ops.mkdir(dirname(filePath), { recursive: true });
|
||||
const tmpPath = ops.createTempPath?.(filePath) ?? defaultTempPath(filePath);
|
||||
await ops.writeFile(tmpPath, content, encoding);
|
||||
|
||||
let lastError: unknown = null;
|
||||
let attempts = 0;
|
||||
|
||||
for (attempts = 1; attempts <= MAX_RENAME_ATTEMPTS; attempts++) {
|
||||
try {
|
||||
await ops.rename(tmpPath, filePath);
|
||||
return;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
if (!isTransientLockError(error) || attempts === MAX_RENAME_ATTEMPTS) {
|
||||
break;
|
||||
}
|
||||
await ops.sleep(computeRetryDelayMs(attempts));
|
||||
}
|
||||
}
|
||||
|
||||
await cleanupTempFileAsync(tmpPath, ops);
|
||||
throw buildAtomicWriteError(filePath, attempts, lastError);
|
||||
}
|
||||
|
||||
/** @internal Exported for retry/cleanup tests. */
|
||||
export function atomicWriteSyncWithOps(
|
||||
filePath: string,
|
||||
content: string,
|
||||
encoding: RetryableEncoding = "utf-8",
|
||||
ops: AtomicWriteSyncOps,
|
||||
): void {
|
||||
ops.mkdir(dirname(filePath), { recursive: true });
|
||||
const tmpPath = ops.createTempPath?.(filePath) ?? defaultTempPath(filePath);
|
||||
ops.writeFile(tmpPath, content, encoding);
|
||||
|
||||
let lastError: unknown = null;
|
||||
let attempts = 0;
|
||||
|
||||
for (attempts = 1; attempts <= MAX_RENAME_ATTEMPTS; attempts++) {
|
||||
try {
|
||||
ops.rename(tmpPath, filePath);
|
||||
return;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
if (!isTransientLockError(error) || attempts === MAX_RENAME_ATTEMPTS) {
|
||||
break;
|
||||
}
|
||||
ops.sleep(computeRetryDelayMs(attempts));
|
||||
}
|
||||
}
|
||||
|
||||
cleanupTempFileSync(tmpPath, ops);
|
||||
throw buildAtomicWriteError(filePath, attempts, lastError);
|
||||
}
|
||||
|
||||
// Production async ops: thin bindings onto node:fs promises plus the
// setTimeout-based delay. Tests inject fakes instead of these.
const DEFAULT_ASYNC_OPS: AtomicWriteAsyncOps = {
  mkdir: async (path, options) => {
    await fs.mkdir(path, options);
  },
  writeFile: (path, content, encoding) => fs.writeFile(path, content, encoding),
  rename: (from, to) => fs.rename(from, to),
  unlink: (path) => fs.unlink(path),
  sleep: delay,
};
|
||||
|
||||
// Production sync ops: thin bindings onto the blocking node:fs API plus the
// Atomics-based blocking sleep. Tests inject fakes instead of these.
const DEFAULT_SYNC_OPS: AtomicWriteSyncOps = {
  mkdir: (path, options) => mkdirSync(path, options),
  writeFile: (path, content, encoding) => writeFileSync(path, content, encoding),
  rename: (from, to) => renameSync(from, to),
  unlink: (path) => unlinkSync(path),
  sleep: sleepSync,
};
|
||||
|
||||
/**
|
||||
* Atomically writes content to a file by writing to a temp file first,
|
||||
* then renaming. Prevents partial/corrupt files on crash.
|
||||
*/
|
||||
export function atomicWriteSync(filePath: string, content: string, encoding: BufferEncoding = "utf-8"): void {
|
||||
mkdirSync(dirname(filePath), { recursive: true })
|
||||
const tmpPath = filePath + `.tmp.${randomBytes(4).toString("hex")}`
|
||||
writeFileSync(tmpPath, content, encoding)
|
||||
try {
|
||||
renameSync(tmpPath, filePath)
|
||||
} catch (err) {
|
||||
try { unlinkSync(tmpPath) } catch { /* orphan cleanup best-effort */ }
|
||||
throw err
|
||||
}
|
||||
return atomicWriteSyncWithOps(filePath, content, encoding, DEFAULT_SYNC_OPS);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -23,13 +181,5 @@ export function atomicWriteSync(filePath: string, content: string, encoding: Buf
|
|||
* by writing to a temp file first, then renaming.
|
||||
*/
|
||||
export async function atomicWriteAsync(filePath: string, content: string, encoding: BufferEncoding = "utf-8"): Promise<void> {
|
||||
await fs.mkdir(dirname(filePath), { recursive: true })
|
||||
const tmpPath = filePath + `.tmp.${randomBytes(4).toString("hex")}`
|
||||
await fs.writeFile(tmpPath, content, encoding)
|
||||
try {
|
||||
await fs.rename(tmpPath, filePath)
|
||||
} catch (err) {
|
||||
await fs.unlink(tmpPath).catch(() => { /* orphan cleanup best-effort */ })
|
||||
throw err
|
||||
}
|
||||
return atomicWriteAsyncWithOps(filePath, content, encoding, DEFAULT_ASYNC_OPS);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,12 +9,13 @@ import { deriveState, isMilestoneComplete } from "./state.js";
|
|||
import { saveFile } from "./files.js";
|
||||
import { listWorktrees, resolveGitDir, worktreesDir } from "./worktree-manager.js";
|
||||
import { abortAndReset } from "./git-self-heal.js";
|
||||
import { RUNTIME_EXCLUSION_PATHS, readIntegrationBranch } from "./git-service.js";
|
||||
import { RUNTIME_EXCLUSION_PATHS, resolveMilestoneIntegrationBranch, writeIntegrationBranch } from "./git-service.js";
|
||||
import { nativeIsRepo, nativeBranchExists, nativeWorktreeList, nativeWorktreeRemove, nativeBranchList, nativeBranchDelete, nativeLsFiles, nativeRmCached } from "./native-git-bridge.js";
|
||||
import { readCrashLock, isLockProcessAlive, clearLock } from "./crash-recovery.js";
|
||||
import { ensureGitignore } from "./gitignore.js";
|
||||
import { readAllSessionStatuses, isSessionStale, removeSessionStatus } from "./session-status-io.js";
|
||||
import { recoverFailedMigration } from "./migrate-external.js";
|
||||
import { loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
|
||||
export async function checkGitHealth(
|
||||
basePath: string,
|
||||
|
|
@ -223,17 +224,34 @@ export async function checkGitHealth(
|
|||
// and causes the next merge operation to fail silently.
|
||||
try {
|
||||
const state = await deriveState(basePath);
|
||||
const gitPrefs = loadEffectiveGSDPreferences()?.preferences?.git ?? {};
|
||||
for (const milestone of state.registry) {
|
||||
if (milestone.status === "complete") continue;
|
||||
const integrationBranch = readIntegrationBranch(basePath, milestone.id);
|
||||
if (!integrationBranch) continue; // No stored branch — skip (not yet set)
|
||||
if (!nativeBranchExists(basePath, integrationBranch)) {
|
||||
const resolution = resolveMilestoneIntegrationBranch(basePath, milestone.id, gitPrefs);
|
||||
if (!resolution.recordedBranch) continue; // No stored branch — skip (not yet set)
|
||||
if (resolution.status === "fallback" && resolution.effectiveBranch) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
code: "integration_branch_missing",
|
||||
scope: "milestone",
|
||||
unitId: milestone.id,
|
||||
message: resolution.reason,
|
||||
fixable: true,
|
||||
});
|
||||
if (shouldFix("integration_branch_missing")) {
|
||||
writeIntegrationBranch(basePath, milestone.id, resolution.effectiveBranch);
|
||||
fixesApplied.push(`updated integration branch for ${milestone.id} to "${resolution.effectiveBranch}"`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (resolution.status === "missing") {
|
||||
issues.push({
|
||||
severity: "error",
|
||||
code: "integration_branch_missing",
|
||||
scope: "milestone",
|
||||
unitId: milestone.id,
|
||||
message: `Milestone ${milestone.id} recorded integration branch "${integrationBranch}" but that branch no longer exists in git. Merge-back will fail.`,
|
||||
message: resolution.reason,
|
||||
fixable: false,
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -21,8 +21,9 @@ import { readCrashLock, isLockProcessAlive, clearLock } from "./crash-recovery.j
|
|||
import { abortAndReset } from "./git-self-heal.js";
|
||||
import { rebuildState } from "./doctor.js";
|
||||
import { deriveState } from "./state.js";
|
||||
import { readIntegrationBranch } from "./git-service.js";
|
||||
import { nativeBranchExists, nativeIsRepo } from "./native-git-bridge.js";
|
||||
import { resolveMilestoneIntegrationBranch } from "./git-service.js";
|
||||
import { nativeIsRepo } from "./native-git-bridge.js";
|
||||
import { loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
|
||||
// ── Health Score Tracking ──────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -276,11 +277,15 @@ export async function preDispatchHealthGate(basePath: string): Promise<PreDispat
|
|||
if (nativeIsRepo(basePath)) {
|
||||
const state = await deriveState(basePath);
|
||||
if (state.activeMilestone) {
|
||||
const integrationBranch = readIntegrationBranch(basePath, state.activeMilestone.id);
|
||||
if (integrationBranch && !nativeBranchExists(basePath, integrationBranch)) {
|
||||
const gitPrefs = loadEffectiveGSDPreferences()?.preferences?.git ?? {};
|
||||
const resolution = resolveMilestoneIntegrationBranch(basePath, state.activeMilestone.id, gitPrefs);
|
||||
if (resolution.status === "fallback" && resolution.effectiveBranch) {
|
||||
fixesApplied.push(
|
||||
`using fallback integration branch "${resolution.effectiveBranch}" for milestone ${state.activeMilestone.id}; recorded "${resolution.recordedBranch}" no longer exists`,
|
||||
);
|
||||
} else if (resolution.recordedBranch && resolution.status === "missing") {
|
||||
issues.push(
|
||||
`Integration branch "${integrationBranch}" for milestone ${state.activeMilestone.id} no longer exists in git. ` +
|
||||
`Restore the branch or update the integration branch before dispatching. Run /gsd doctor for details.`,
|
||||
`${resolution.reason} Restore the branch or update the integration branch before dispatching. Run /gsd doctor for details.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -276,6 +276,91 @@ export function writeIntegrationBranch(
|
|||
// .gsd/ is managed externally (symlinked) — metadata is not committed to git.
|
||||
}
|
||||
|
||||
// Possible outcomes: the recorded branch is live, a safe fallback was found,
// or no safe branch could be determined.
export type IntegrationBranchResolutionStatus = "recorded" | "fallback" | "missing";

/** Outcome of resolving a milestone's recorded integration branch. */
export interface IntegrationBranchResolution {
  // Branch name stored in the milestone metadata; null when none was recorded.
  recordedBranch: string | null;
  // Branch callers should actually use; null when no safe branch exists.
  effectiveBranch: string | null;
  status: IntegrationBranchResolutionStatus;
  // Human-readable explanation suitable for doctor / health-gate messages.
  reason: string;
}

/**
 * Resolve a milestone's recorded integration branch into an actionable status.
 *
 * This helper is intentionally scoped to milestones that already have recorded
 * metadata. If no integration branch is recorded, it returns `missing` with no
 * effective branch so callers can continue with their existing non-milestone
 * fallback logic (for example worktree/current-branch detection in getMainBranch).
 */
export function resolveMilestoneIntegrationBranch(
  basePath: string,
  milestoneId: string,
  prefs: GitPreferences = {},
): IntegrationBranchResolution {
  const recordedBranch = readIntegrationBranch(basePath, milestoneId);
  if (!recordedBranch) {
    return {
      recordedBranch: null,
      effectiveBranch: null,
      status: "missing",
      reason: `Milestone ${milestoneId} has no recorded integration branch metadata.`,
    };
  }

  // Happy path: the recorded branch still exists, so use it verbatim.
  if (nativeBranchExists(basePath, recordedBranch)) {
    return {
      recordedBranch,
      effectiveBranch: recordedBranch,
      status: "recorded",
      reason: `Using recorded integration branch "${recordedBranch}" for milestone ${milestoneId}.`,
    };
  }

  // First fallback: an explicitly configured git.main_branch, validated
  // against the branch-name pattern before use.
  const configuredBranch = prefs.main_branch && VALID_BRANCH_NAME.test(prefs.main_branch)
    ? prefs.main_branch
    : null;

  if (configuredBranch) {
    if (nativeBranchExists(basePath, configuredBranch)) {
      return {
        recordedBranch,
        effectiveBranch: configuredBranch,
        status: "fallback",
        reason: `Recorded integration branch "${recordedBranch}" for milestone ${milestoneId} no longer exists; using configured git.main_branch "${configuredBranch}" instead.`,
      };
    }

    // An explicit preference pointing at a missing branch is a hard
    // "missing" — we do not silently override the user's configuration
    // with a detected branch.
    return {
      recordedBranch,
      effectiveBranch: null,
      status: "missing",
      reason: `Recorded integration branch "${recordedBranch}" for milestone ${milestoneId} no longer exists, and configured git.main_branch "${configuredBranch}" is unavailable.`,
    };
  }

  // Second fallback: the repo's detected default branch (e.g. main/master).
  try {
    const detectedBranch = nativeDetectMainBranch(basePath);
    if (detectedBranch && VALID_BRANCH_NAME.test(detectedBranch) && nativeBranchExists(basePath, detectedBranch)) {
      return {
        recordedBranch,
        effectiveBranch: detectedBranch,
        status: "fallback",
        reason: `Recorded integration branch "${recordedBranch}" for milestone ${milestoneId} no longer exists; using detected fallback branch "${detectedBranch}" instead.`,
      };
    }
  } catch {
    // Fall through to the explicit missing result below.
  }

  return {
    recordedBranch,
    effectiveBranch: null,
    status: "missing",
    reason: `Recorded integration branch "${recordedBranch}" for milestone ${milestoneId} no longer exists, and no safe fallback branch could be determined.`,
  };
}
|
||||
|
||||
// ─── Git Helper ────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
|
|
@ -480,10 +565,9 @@ export class GitServiceImpl {
|
|||
|
||||
// Check milestone integration branch — recorded when auto-mode starts
|
||||
if (this._milestoneId) {
|
||||
const integrationBranch = readIntegrationBranch(this.basePath, this._milestoneId);
|
||||
if (integrationBranch) {
|
||||
// Verify the branch still exists locally (could have been deleted)
|
||||
if (nativeBranchExists(this.basePath, integrationBranch)) return integrationBranch;
|
||||
const resolved = resolveMilestoneIntegrationBranch(this.basePath, this._milestoneId);
|
||||
if (resolved.effectiveBranch) {
|
||||
return resolved.effectiveBranch;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
144
src/resources/extensions/gsd/tests/atomic-write.test.ts
Normal file
144
src/resources/extensions/gsd/tests/atomic-write.test.ts
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import {
|
||||
atomicWriteAsyncWithOps,
|
||||
atomicWriteSyncWithOps,
|
||||
type AtomicWriteAsyncOps,
|
||||
type AtomicWriteSyncOps,
|
||||
} from "../atomic-write.ts";
|
||||
|
||||
function makeError(code: string, message = code): NodeJS.ErrnoException {
|
||||
const err = new Error(message) as NodeJS.ErrnoException;
|
||||
err.code = code;
|
||||
return err;
|
||||
}
|
||||
|
||||
// Builds a fake async ops bundle backed by an in-memory Map. `plan` scripts
// the outcome of successive rename calls: an Error entry makes that attempt
// throw, a null entry makes it succeed. Call logs are returned for assertions.
function createAsyncHarness(plan: Array<Error | null>) {
  const files = new Map<string, string>();
  const renameCalls: Array<{ from: string; to: string }> = [];
  const unlinkCalls: string[] = [];
  const sleepCalls: number[] = [];
  let tempCounter = 0;

  const ops: AtomicWriteAsyncOps = {
    mkdir: async () => {},
    writeFile: async (path, content) => {
      files.set(path, String(content));
    },
    rename: async (from, to) => {
      renameCalls.push({ from, to });
      // Consume the next scripted outcome; an exhausted plan means success.
      const outcome = plan.shift() ?? null;
      if (outcome) throw outcome;
      const content = files.get(from);
      if (content === undefined) throw makeError("ENOENT", "temp missing");
      files.set(to, content);
      files.delete(from);
    },
    unlink: async (path) => {
      unlinkCalls.push(path);
      files.delete(path);
    },
    sleep: async (ms) => {
      sleepCalls.push(ms);
    },
    // Deterministic temp names so tests can reason about paths.
    createTempPath: (filePath) => `${filePath}.tmp.test-${++tempCounter}`,
  };

  return { ops, files, renameCalls, unlinkCalls, sleepCalls };
}
||||
|
||||
// Synchronous twin of createAsyncHarness: same scripted-rename semantics and
// call logs, but with blocking ops for atomicWriteSyncWithOps.
function createSyncHarness(plan: Array<Error | null>) {
  const files = new Map<string, string>();
  const renameCalls: Array<{ from: string; to: string }> = [];
  const unlinkCalls: string[] = [];
  const sleepCalls: number[] = [];
  let tempCounter = 0;

  const ops: AtomicWriteSyncOps = {
    mkdir: () => {},
    writeFile: (path, content) => {
      files.set(path, String(content));
    },
    rename: (from, to) => {
      renameCalls.push({ from, to });
      // Consume the next scripted outcome; an exhausted plan means success.
      const outcome = plan.shift() ?? null;
      if (outcome) throw outcome;
      const content = files.get(from);
      if (content === undefined) throw makeError("ENOENT", "temp missing");
      files.set(to, content);
      files.delete(from);
    },
    unlink: (path) => {
      unlinkCalls.push(path);
      files.delete(path);
    },
    sleep: (ms) => {
      sleepCalls.push(ms);
    },
    // Deterministic temp names so tests can reason about paths.
    createTempPath: (filePath) => `${filePath}.tmp.test-${++tempCounter}`,
  };

  return { ops, files, renameCalls, unlinkCalls, sleepCalls };
}
||||
|
||||
// Two transient failures then success: the final rename wins, no temp-file
// cleanup is needed, and a backoff sleep separates each retry.
test("atomicWriteAsync retries transient rename failures and preserves atomicity", async () => {
  const harness = createAsyncHarness([makeError("EBUSY"), makeError("EPERM"), null]);
  harness.files.set("C:/tmp/output.txt", "old-content");

  await atomicWriteAsyncWithOps("C:/tmp/output.txt", "new-content", "utf-8", harness.ops);

  assert.equal(harness.renameCalls.length, 3);
  assert.equal(harness.files.get("C:/tmp/output.txt"), "new-content");
  assert.equal(harness.unlinkCalls.length, 0);
  assert.equal(harness.sleepCalls.length, 2);
});

// Five transient failures exhaust MAX_RENAME_ATTEMPTS: the destination keeps
// its old content (atomicity), the temp file is unlinked exactly once, and
// the error message carries path, attempt count, and errno code.
test("atomicWriteAsync cleans up temp file and reports attempts after repeated transient failures", async () => {
  const harness = createAsyncHarness([
    makeError("EACCES"),
    makeError("EBUSY"),
    makeError("EPERM"),
    makeError("EACCES"),
    makeError("EBUSY"),
  ]);
  harness.files.set("C:/tmp/output.txt", "old-content");

  await assert.rejects(
    atomicWriteAsyncWithOps("C:/tmp/output.txt", "new-content", "utf-8", harness.ops),
    (error: unknown) => {
      assert.match(String(error), /C:\\\/tmp\/output\.txt|C:\/tmp\/output\.txt/);
      assert.match(String(error), /attempt/i);
      assert.match(String(error), /EBUSY|EPERM|EACCES/);
      return true;
    },
  );

  assert.equal(harness.renameCalls.length, 5);
  assert.equal(harness.files.get("C:/tmp/output.txt"), "old-content");
  assert.equal(harness.unlinkCalls.length, 1);
});

// ENOENT is not in the transient set: exactly one attempt, no sleeps, and the
// temp file is still cleaned up so nothing is orphaned.
test("atomicWriteAsync does not retry non-transient rename failures", async () => {
  const harness = createAsyncHarness([makeError("ENOENT")]);
  harness.files.set("C:/tmp/output.txt", "old-content");

  await assert.rejects(() => atomicWriteAsyncWithOps("C:/tmp/output.txt", "new-content", "utf-8", harness.ops));

  assert.equal(harness.renameCalls.length, 1);
  assert.equal(harness.sleepCalls.length, 0);
  assert.equal(harness.unlinkCalls.length, 1);
  assert.equal(harness.files.get("C:/tmp/output.txt"), "old-content");
});

// Synchronous path mirrors the async retry behavior.
test("atomicWriteSync retries transient rename failures and succeeds", () => {
  const harness = createSyncHarness([makeError("EACCES"), makeError("EBUSY"), null]);
  harness.files.set("C:/tmp/output.txt", "old-content");

  atomicWriteSyncWithOps("C:/tmp/output.txt", "new-content", "utf-8", harness.ops);

  assert.equal(harness.renameCalls.length, 3);
  assert.equal(harness.sleepCalls.length, 2);
  assert.equal(harness.unlinkCalls.length, 0);
  assert.equal(harness.files.get("C:/tmp/output.txt"), "new-content");
});
||||
|
|
@ -8,7 +8,7 @@
|
|||
* integration_branch_missing, worktree_directory_orphaned
|
||||
*/
|
||||
|
||||
import { mkdtempSync, mkdirSync, writeFileSync, rmSync, existsSync, realpathSync } from "node:fs";
|
||||
import { mkdtempSync, mkdirSync, writeFileSync, rmSync, existsSync, realpathSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
|
|
@ -345,6 +345,73 @@ async function main(): Promise<void> {
|
|||
}
|
||||
|
||||
// ─── Test: Orphaned worktree directory ─────────────────────────────
|
||||
console.log("\n=== integration_branch_missing: stale metadata with detected fallback ===");
|
||||
{
|
||||
const dir = createRepoWithActiveMilestone();
|
||||
cleanups.push(dir);
|
||||
|
||||
const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json");
|
||||
writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feat/does-not-exist" }, null, 2));
|
||||
|
||||
const detect = await runGSDDoctor(dir);
|
||||
const missingBranchIssues = detect.issues.filter(i => i.code === "integration_branch_missing");
|
||||
assertEq(missingBranchIssues.length, 1, "reports one stale integration branch issue");
|
||||
assertEq(missingBranchIssues[0]?.severity, "warning", "stale metadata is warning when a fallback branch exists");
|
||||
assertEq(missingBranchIssues[0]?.fixable, true, "stale metadata becomes auto-fixable when fallback exists");
|
||||
assertTrue(
|
||||
missingBranchIssues[0]?.message.includes("feat/does-not-exist") &&
|
||||
missingBranchIssues[0]?.message.includes("main"),
|
||||
"warning mentions stale recorded branch and detected fallback branch",
|
||||
);
|
||||
|
||||
const fixed = await runGSDDoctor(dir, { fix: true });
|
||||
assertTrue(
|
||||
fixed.fixesApplied.some(f => f.includes('updated integration branch for M001 to "main"')),
|
||||
"doctor fix rewrites stale integration branch metadata to detected fallback branch",
|
||||
);
|
||||
|
||||
const repairedMeta = JSON.parse(readFileSync(metaPath, "utf-8"));
|
||||
assertEq(repairedMeta.integrationBranch, "main", "metadata rewritten to detected fallback branch");
|
||||
}
|
||||
|
||||
console.log("\n=== integration_branch_missing: stale metadata with configured fallback ===");
|
||||
{
|
||||
const dir = createRepoWithActiveMilestone();
|
||||
cleanups.push(dir);
|
||||
|
||||
run("git branch trunk", dir);
|
||||
writeFileSync(join(dir, ".gsd", "preferences.md"), `---\ngit:\n isolation: "worktree"\n main_branch: "trunk"\n---\n`);
|
||||
|
||||
const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json");
|
||||
writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feat/does-not-exist" }, null, 2));
|
||||
|
||||
const previousCwd = process.cwd();
|
||||
process.chdir(dir);
|
||||
try {
|
||||
const detect = await runGSDDoctor(dir);
|
||||
const missingBranchIssues = detect.issues.filter(i => i.code === "integration_branch_missing");
|
||||
assertEq(missingBranchIssues.length, 1, "configured fallback still reports one stale integration branch issue");
|
||||
assertEq(missingBranchIssues[0]?.severity, "warning", "configured fallback keeps stale metadata at warning severity");
|
||||
assertEq(missingBranchIssues[0]?.fixable, true, "configured fallback remains auto-fixable");
|
||||
assertTrue(
|
||||
missingBranchIssues[0]?.message.includes("feat/does-not-exist") &&
|
||||
missingBranchIssues[0]?.message.includes("trunk"),
|
||||
"warning mentions stale recorded branch and configured fallback branch",
|
||||
);
|
||||
|
||||
const fixed = await runGSDDoctor(dir, { fix: true });
|
||||
assertTrue(
|
||||
fixed.fixesApplied.some(f => f.includes('updated integration branch for M001 to "trunk"')),
|
||||
"doctor fix rewrites stale metadata to configured fallback branch",
|
||||
);
|
||||
} finally {
|
||||
process.chdir(previousCwd);
|
||||
}
|
||||
|
||||
const repairedMeta = JSON.parse(readFileSync(metaPath, "utf-8"));
|
||||
assertEq(repairedMeta.integrationBranch, "trunk", "metadata rewritten to configured fallback branch");
|
||||
}
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
console.log("\n=== worktree_directory_orphaned ===");
|
||||
{
|
||||
|
|
|
|||
|
|
@ -43,6 +43,33 @@ function createGitRepo(): string {
|
|||
return dir;
|
||||
}
|
||||
|
||||
// Creates a throwaway git repo containing one active (incomplete) milestone
// M001 with a minimal ROADMAP, used by the stale-integration-branch scenarios.
// The ROADMAP content below is a fixture consumed by the parser — keep exact.
function createRepoWithActiveMilestone(): string {
  const dir = createGitRepo();
  const msDir = join(dir, ".gsd", "milestones", "M001");
  mkdirSync(msDir, { recursive: true });
  writeFileSync(join(msDir, "ROADMAP.md"), `---
id: M001
title: "Active Milestone"
---

# M001: Active Milestone

## Vision
Test

## Success Criteria
- Done

## Slices
- [ ] **S01: Test slice** \`risk:low\` \`depends:[]\`
> After this: done

## Boundary Map
_None_
`);
  return dir;
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const cleanups: string[] = [];
|
||||
|
||||
|
|
@ -265,6 +292,48 @@ async function main(): Promise<void> {
|
|||
assertTrue(result.issues.length === 0, "no blocking issues after heal");
|
||||
}
|
||||
|
||||
console.log("\n=== health gate: stale integration branch uses detected fallback ===");
|
||||
{
|
||||
const dir = createRepoWithActiveMilestone();
|
||||
cleanups.push(dir);
|
||||
|
||||
const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json");
|
||||
writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feature/missing" }, null, 2));
|
||||
|
||||
const result = await preDispatchHealthGate(dir);
|
||||
assertTrue(result.proceed, "gate does not block when stale integration branch has detected fallback");
|
||||
assertEq(result.issues.length, 0, "stale integration branch with fallback is not a blocking issue");
|
||||
assertTrue(
|
||||
result.fixesApplied.some(f => f.includes('feature/missing') && f.includes('main')),
|
||||
"fixesApplied reports stale recorded branch and detected fallback branch",
|
||||
);
|
||||
}
|
||||
|
||||
console.log("\n=== health gate: stale integration branch uses configured fallback ===");
|
||||
{
|
||||
const dir = createRepoWithActiveMilestone();
|
||||
cleanups.push(dir);
|
||||
|
||||
run("git branch trunk", dir);
|
||||
writeFileSync(join(dir, ".gsd", "preferences.md"), `---\ngit:\n main_branch: "trunk"\n---\n`);
|
||||
const metaPath = join(dir, ".gsd", "milestones", "M001", "M001-META.json");
|
||||
writeFileSync(metaPath, JSON.stringify({ integrationBranch: "feature/missing" }, null, 2));
|
||||
|
||||
const previousCwd = process.cwd();
|
||||
process.chdir(dir);
|
||||
try {
|
||||
const result = await preDispatchHealthGate(dir);
|
||||
assertTrue(result.proceed, "gate does not block when configured main_branch can be used as fallback");
|
||||
assertEq(result.issues.length, 0, "configured fallback is not treated as a blocking issue");
|
||||
assertTrue(
|
||||
result.fixesApplied.some(f => f.includes('feature/missing') && f.includes('trunk')),
|
||||
"fixesApplied reports stale recorded branch and configured fallback branch",
|
||||
);
|
||||
} finally {
|
||||
process.chdir(previousCwd);
|
||||
}
|
||||
}
|
||||
|
||||
} finally {
|
||||
resetProactiveHealing();
|
||||
for (const dir of cleanups) {
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import {
|
|||
VALID_BRANCH_NAME,
|
||||
runGit,
|
||||
readIntegrationBranch,
|
||||
resolveMilestoneIntegrationBranch,
|
||||
writeIntegrationBranch,
|
||||
type GitPreferences,
|
||||
type CommitOptions,
|
||||
|
|
@ -991,6 +992,65 @@ async function main(): Promise<void> {
|
|||
rmSync(repo, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── resolveMilestoneIntegrationBranch: recorded branch wins when it exists ───
|
||||
|
||||
console.log("\n=== Integration branch: resolver prefers recorded branch ===");
|
||||
|
||||
{
|
||||
const repo = initBranchTestRepo();
|
||||
run("git checkout -b feature/live", repo);
|
||||
run("git checkout main", repo);
|
||||
writeIntegrationBranch(repo, "M001", "feature/live");
|
||||
|
||||
const resolved = resolveMilestoneIntegrationBranch(repo, "M001");
|
||||
assertEq(resolved.status, "recorded", "resolver reports recorded branch when metadata branch exists");
|
||||
assertEq(resolved.recordedBranch, "feature/live", "resolver includes recorded branch");
|
||||
assertEq(resolved.effectiveBranch, "feature/live", "resolver uses recorded branch as effective branch");
|
||||
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── resolveMilestoneIntegrationBranch: falls back to detected default ────────
|
||||
|
||||
console.log("\n=== Integration branch: resolver falls back to detected default ===");
|
||||
|
||||
{
|
||||
const repo = initBranchTestRepo();
|
||||
writeIntegrationBranch(repo, "M001", "deleted-branch");
|
||||
|
||||
const resolved = resolveMilestoneIntegrationBranch(repo, "M001");
|
||||
assertEq(resolved.status, "fallback", "resolver reports fallback when recorded branch is stale");
|
||||
assertEq(resolved.recordedBranch, "deleted-branch", "resolver preserves stale recorded branch for diagnostics");
|
||||
assertEq(resolved.effectiveBranch, "main", "resolver falls back to detected default branch");
|
||||
assertTrue(
|
||||
resolved.reason.includes("deleted-branch") && resolved.reason.includes("main"),
|
||||
"resolver reason mentions stale recorded branch and fallback branch",
|
||||
);
|
||||
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── resolveMilestoneIntegrationBranch: configured main_branch is fallback ─────
|
||||
|
||||
console.log("\n=== Integration branch: resolver uses configured fallback branch ===");
|
||||
|
||||
{
|
||||
const repo = initBranchTestRepo();
|
||||
run("git checkout -b trunk", repo);
|
||||
run("git checkout main", repo);
|
||||
writeIntegrationBranch(repo, "M001", "deleted-branch");
|
||||
|
||||
const resolved = resolveMilestoneIntegrationBranch(repo, "M001", { main_branch: "trunk" });
|
||||
assertEq(resolved.status, "fallback", "resolver reports fallback when using configured main_branch");
|
||||
assertEq(resolved.effectiveBranch, "trunk", "resolver prefers configured main_branch as fallback");
|
||||
assertTrue(
|
||||
resolved.reason.includes("deleted-branch") && resolved.reason.includes("trunk"),
|
||||
"configured fallback reason mentions stale branch and configured branch",
|
||||
);
|
||||
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── Per-milestone isolation: different milestones, different targets ──
|
||||
|
||||
console.log("\n=== Integration branch: per-milestone isolation ===");
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue