perf: optimize hot-path lookups, cache clearing, and error resilience (#560)

* fix(undo): use invalidateAllCaches to prevent stale state after undo

After the undo deleted summary files and modified PLAN files, only
invalidateStateCache() was called. The path and parse caches remained
stale, causing deriveState() to return incorrect results and show
undone tasks as still complete.
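
A rough sketch of the intended relationship, assuming invalidateAllCaches()
simply composes the individual cache-clearing helpers. The cache shapes and
clearParseCache are hypothetical; only clearPathCache and invalidateStateCache
appear in this commit, so treat this as illustration rather than the repo's
actual implementation:

const stateCache = new Map<string, unknown>();  // derived-state cache
const pathCache = new Map<string, string[]>();  // directory-listing cache
const parseCache = new Map<string, unknown>();  // parsed PLAN/summary cache (hypothetical)

export function invalidateStateCache(): void { stateCache.clear(); }
export function clearPathCache(): void { pathCache.clear(); }
export function clearParseCache(): void { parseCache.clear(); }  // hypothetical name

export function invalidateAllCaches(): void {
  invalidateStateCache();  // the only cache the old undo path cleared
  clearPathCache();        // stale listings made deleted summaries look present
  clearParseCache();       // stale parses kept the old PLAN contents alive
}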

* perf: optimize hot-path lookups, cache clearing, and error resilience

- Replace O(n) Array.includes() with Set-based O(1) lookups in
  persistCompletedKey, findCommitsForUnit, and extractCommitShas
  (see the sketch after this list)
- Skip unnecessary cache invalidation for hook units in
  verifyExpectedArtifact (moved clearPathCache after hook early-return)
- Avoid redundant disk writes in removePersistedKey when key not present
- Single-pass partition for conflicted files in reconcileMergeState
  instead of two separate filter passes
- Wrap undo git operations in try/finally to guarantee cache
  invalidation even on partial failure
- Surface auto-start errors to the user via ui.notify instead of
  silently swallowing them (previously debug-only logging)
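
For the Set-based lookup bullet, the change targets the quadratic pattern of
calling Array.includes() inside a loop. A minimal standalone sketch of the
before/after shape, using illustrative helpers rather than the repo's
activity-log code:

// Before: each candidate is checked against the array with a full scan,
// so deduplicating m candidates into n collected SHAs costs O(n * m).
function dedupeWithArray(candidates: string[]): string[] {
  const commits: string[] = [];
  for (const sha of candidates) {
    if (!commits.includes(sha)) commits.push(sha);  // O(n) scan per candidate
  }
  return commits;
}

// After: a Set mirrors the array for membership tests. The array still
// preserves insertion order for callers; each lookup is O(1) on average.
function dedupeWithSet(candidates: string[]): string[] {
  const seen = new Set<string>();
  const commits: string[] = [];
  for (const sha of candidates) {
    if (!seen.has(sha)) {
      seen.add(sha);
      commits.push(sha);
    }
  }
  return commits;
}
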
Flux Labs 2026-03-15 20:47:30 -05:00 committed by GitHub
parent 7bad35702e
commit a3c52b2a1b
3 changed files with 43 additions and 29 deletions

@@ -96,13 +96,13 @@ export function resolveExpectedArtifactPath(unitType: string, unitId: string, ba
* skipped writing the UAT file (see #176).
*/
export function verifyExpectedArtifact(unitType: string, unitId: string, base: string): boolean {
// Clear stale directory listing cache so artifact checks see fresh disk state (#431)
clearPathCache();
// Hook units have no standard artifact — always pass. Their lifecycle
// is managed by the hook engine, not the artifact verification system.
if (unitType.startsWith("hook/")) return true;
// Clear stale directory listing cache so artifact checks see fresh disk state (#431).
// Moved after hook check to avoid unnecessary cache clears for hook units.
clearPathCache();
if (unitType === "rewrite-docs") {
const overridesPath = resolveGsdRootFile(base, "OVERRIDES");
@@ -296,7 +296,8 @@ export function persistCompletedKey(base: string, key: string): void {
keys = JSON.parse(readFileSync(file, "utf-8"));
}
} catch (e) { /* corrupt file — start fresh */ void e; }
if (!keys.includes(key)) {
const keySet = new Set(keys);
if (!keySet.has(key)) {
keys.push(key);
// Atomic write: tmp file + rename prevents partial writes on crash
const tmpFile = file + ".tmp";
@@ -310,12 +311,15 @@ export function removePersistedKey(base: string, key: string): void {
const file = completedKeysPath(base);
try {
if (existsSync(file)) {
let keys: string[] = JSON.parse(readFileSync(file, "utf-8"));
keys = keys.filter(k => k !== key);
// Atomic write: tmp file + rename prevents partial writes on crash
const tmpFile = file + ".tmp";
writeFileSync(tmpFile, JSON.stringify(keys), "utf-8");
renameSync(tmpFile, file);
const keys: string[] = JSON.parse(readFileSync(file, "utf-8"));
const filtered = keys.filter(k => k !== key);
// Only write if the key was actually present
if (filtered.length !== keys.length) {
// Atomic write: tmp file + rename prevents partial writes on crash
const tmpFile = file + ".tmp";
writeFileSync(tmpFile, JSON.stringify(filtered), "utf-8");
renameSync(tmpFile, file);
}
}
} catch (e) { /* non-fatal: removePersistedKey failure */ void e; }
}
@@ -360,8 +364,11 @@ export function reconcileMergeState(basePath: string, ctx: ExtensionContext): bo
} else {
// Still conflicted — try auto-resolving .gsd/ state file conflicts (#530)
const conflictedFiles = unmerged.trim().split("\n").filter(Boolean);
const gsdConflicts = conflictedFiles.filter(f => f.startsWith(".gsd/"));
const codeConflicts = conflictedFiles.filter(f => !f.startsWith(".gsd/"));
const gsdConflicts: string[] = [];
const codeConflicts: string[] = [];
for (const f of conflictedFiles) {
(f.startsWith(".gsd/") ? gsdConflicts : codeConflicts).push(f);
}
if (gsdConflicts.length > 0 && codeConflicts.length === 0) {
// All conflicts are in .gsd/ state files — auto-resolve by accepting theirs

@@ -132,6 +132,7 @@ export function checkAutoStartAfterDiscuss(): boolean {
pendingAutoStart = null;
startAuto(ctx, pi, basePath, false, { step }).catch((err) => {
ctx.ui.notify(`Auto-start failed: ${err instanceof Error ? err.message : String(err)}`, "warning");
if (process.env.GSD_DEBUG) console.error('[gsd] auto start error:', err);
});
return true;

@@ -102,26 +102,28 @@ export async function handleUndo(args: string, ctx: ExtensionCommandContext, _pi
// 5. Try to revert git commits from activity log
let commitsReverted = 0;
const activityDir = join(gsdRoot(basePath), "activity");
if (existsSync(activityDir)) {
const commits = findCommitsForUnit(activityDir, unitType, unitId);
if (commits.length > 0) {
for (const sha of commits.reverse()) {
try {
execFileSync("git", ["revert", "--no-commit", sha], { cwd: basePath, timeout: 10000, stdio: "ignore" });
commitsReverted++;
} catch {
// Revert conflict or already reverted — skip
try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, timeout: 5000, stdio: "ignore" }); } catch { /* no-op */ }
break;
try {
if (existsSync(activityDir)) {
const commits = findCommitsForUnit(activityDir, unitType, unitId);
if (commits.length > 0) {
for (const sha of commits.reverse()) {
try {
execFileSync("git", ["revert", "--no-commit", sha], { cwd: basePath, timeout: 10000, stdio: "ignore" });
commitsReverted++;
} catch {
// Revert conflict or already reverted — skip
try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, timeout: 5000, stdio: "ignore" }); } catch { /* no-op */ }
break;
}
}
}
}
} finally {
// 6. Re-derive state — always invalidate caches even if git operations fail
invalidateAllCaches();
await deriveState(basePath);
}
// 6. Re-derive state
invalidateAllCaches();
await deriveState(basePath);
// Build result message
const results: string[] = [`Undone: ${unitType} (${unitId})`];
results.push(` - Removed from completed-units.json`);
@@ -172,6 +174,7 @@ function findFileWithPrefix(dir: string, prefix: string, suffix: string): string
export function findCommitsForUnit(activityDir: string, unitType: string, unitId: string): string[] {
const safeUnitId = unitId.replace(/\//g, "-");
const commitSet = new Set<string>();
const commits: string[] = [];
try {
@@ -194,7 +197,8 @@ export function findCommitsForUnit(activityDir: string, unitType: string, unitId
for (const block of blocks) {
if (block.type === "tool_result" && typeof block.content === "string") {
for (const sha of extractCommitShas(block.content)) {
if (!commits.includes(sha)) {
if (!commitSet.has(sha)) {
commitSet.add(sha);
commits.push(sha);
}
}
@@ -209,10 +213,12 @@ export function findCommitsForUnit(activityDir: string, unitType: string, unitId
}
export function extractCommitShas(content: string): string[] {
const seen = new Set<string>();
const commits: string[] = [];
for (const match of content.matchAll(/\[[\w/.-]+\s+([a-f0-9]{7,40})\]/g)) {
const sha = match[1];
if (sha && !commits.includes(sha)) {
if (sha && !seen.has(sha)) {
seen.add(sha);
commits.push(sha);
}
}