fix: use atomic writes for completed-units.json and invalidate caches in db-writer (#1069)

Addresses state safety issues found during #1062 deep dive:

1. completed-units.json writes in auto-worktree.ts and auto-worktree-sync.ts
   used plain writeFileSync which could produce truncated/corrupt files on
   crash, losing completion keys and causing unit re-dispatch. Switched to
   atomicWriteSync (temp file + rename) for crash safety.

2. Plan file checkbox reconciliation in auto-worktree.ts also switched to
   atomicWriteSync to prevent partial PLAN.md writes on crash.

3. db-writer.ts functions (saveDecisionToDb, updateRequirementInDb,
   saveArtifactToDb) wrote markdown files via saveFile() without invalidating
   caches afterward. Added targeted cache invalidation (state + path + parse)
   so deriveState() always sees fresh data. Uses individual invalidation
   functions rather than invalidateAllCaches() to avoid clearing the artifacts
   table that was just written to.
This commit is contained in:
Jeremy McSpadden 2026-03-17 23:01:08 -05:00 committed by GitHub
parent 668f12b97f
commit 60dfaabe03
3 changed files with 25 additions and 5 deletions

View file

@@ -10,10 +10,11 @@
* Also contains resource staleness detection and stale worktree escape.
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync, cpSync, unlinkSync, readdirSync } from "node:fs";
import { existsSync, mkdirSync, readFileSync, cpSync, unlinkSync, readdirSync } from "node:fs";
import { join, sep as pathSep } from "node:path";
import { homedir } from "node:os";
import { safeCopy, safeCopyRecursive } from "./safe-fs.js";
import { atomicWriteSync } from "./atomic-write.js";
// ─── Project Root → Worktree Sync ─────────────────────────────────────────
@@ -79,7 +80,7 @@ export function syncStateToProjectRoot(worktreePath: string, projectRoot: string
try { dstKeys = JSON.parse(readFileSync(dstKeysFile, "utf8")); } catch { /* ignore corrupt dst */ }
}
const merged = [...new Set([...dstKeys, ...srcKeys])];
writeFileSync(dstKeysFile, JSON.stringify(merged, null, 2));
atomicWriteSync(dstKeysFile, JSON.stringify(merged, null, 2));
} catch { /* non-fatal */ }
}

View file

@@ -6,10 +6,11 @@
* manages create, enter, detect, and teardown for auto-mode worktrees.
*/
import { existsSync, cpSync, readFileSync, writeFileSync, readdirSync, mkdirSync, realpathSync, unlinkSync } from "node:fs";
import { existsSync, cpSync, readFileSync, readdirSync, mkdirSync, realpathSync, unlinkSync } from "node:fs";
import { isAbsolute, join } from "node:path";
import { GSDError, GSD_IO_ERROR, GSD_GIT_ERROR } from "./errors.js";
import { copyWorktreeDb, reconcileWorktreeDb, isDbAvailable } from "./gsd-db.js";
import { atomicWriteSync } from "./atomic-write.js";
import { execSync, execFileSync } from "node:child_process";
import { safeCopy, safeCopyRecursive } from "./safe-fs.js";
import {
@@ -183,7 +184,7 @@ function reconcilePlanCheckboxes(projectRoot: string, wtPath: string, milestoneI
if (changed) {
try {
writeFileSync(dstFile, updated, "utf-8");
atomicWriteSync(dstFile, updated, "utf-8");
} catch { /* non-fatal */ }
}
}
@@ -201,7 +202,7 @@ function reconcilePlanCheckboxes(projectRoot: string, wtPath: string, milestoneI
const merged = [...new Set([...dst, ...src])];
if (merged.length > dst.length) {
mkdirSync(join(wtPath, ".gsd"), { recursive: true });
writeFileSync(dstKeys, JSON.stringify(merged), "utf-8");
atomicWriteSync(dstKeys, JSON.stringify(merged), "utf-8");
}
} catch { /* non-fatal */ }
}

View file

@@ -13,6 +13,9 @@ import type { Decision, Requirement } from './types.js';
import { resolveGsdRootFile } from './paths.js';
import { saveFile } from './files.js';
import { GSDError, GSD_STALE_STATE, GSD_IO_ERROR } from './errors.js';
import { invalidateStateCache } from './state.js';
import { clearPathCache } from './paths.js';
import { clearParseCache } from './files.js';
// ─── Markdown Generators ──────────────────────────────────────────────────
@@ -226,6 +229,11 @@ export async function saveDecisionToDb(
const md = generateDecisionsMd(allDecisions);
const filePath = resolveGsdRootFile(basePath, 'DECISIONS');
await saveFile(filePath, md);
// Invalidate file-read caches so deriveState() sees the updated markdown.
// Do NOT clear the artifacts table — we just wrote to it intentionally.
invalidateStateCache();
clearPathCache();
clearParseCache();
return { id };
} catch (err) {
@@ -290,6 +298,11 @@ export async function updateRequirementInDb(
const md = generateRequirementsMd(nonSuperseded);
const filePath = resolveGsdRootFile(basePath, 'REQUIREMENTS');
await saveFile(filePath, md);
// Invalidate file-read caches so deriveState() sees the updated markdown.
// Do NOT clear the artifacts table — we just wrote to it intentionally.
invalidateStateCache();
clearPathCache();
clearParseCache();
} catch (err) {
process.stderr.write(`gsd-db: updateRequirementInDb failed: ${(err as Error).message}\n`);
throw err;
@@ -335,6 +348,11 @@ export async function saveArtifactToDb(
throw new GSDError(GSD_IO_ERROR, `saveArtifactToDb: path escapes .gsd/ directory: ${opts.path}`);
}
await saveFile(fullPath, opts.content);
// Invalidate file-read caches so deriveState() sees the updated markdown.
// Do NOT clear the artifacts table — we just wrote to it intentionally.
invalidateStateCache();
clearPathCache();
clearParseCache();
} catch (err) {
process.stderr.write(`gsd-db: saveArtifactToDb failed: ${(err as Error).message}\n`);
throw err;