refactor: extend json-persistence utility and migrate top JSON I/O callsites (#1216)

Add writeJsonFileAtomic to json-persistence.ts for atomic write-to-tmp-then-rename.
Migrate 5 files from raw JSON.parse(readFileSync(...)) to loadJsonFileOrNull/writeJsonFileAtomic/saveJsonFile:
- session-status-io.ts: all read/write/signal functions
- auto-worktree-sync.ts: readResourceVersion
- auto-recovery.ts: persistCompletedKey, removePersistedKey, loadPersistedKeys
- commands-logs.ts: metrics summary read
- queue-order.ts: loadQueueOrder, saveQueueOrder

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
TÂCHES 2026-03-18 12:06:01 -06:00 committed by GitHub
parent 3a0999c6db
commit 0fdf52625b
6 changed files with 85 additions and 95 deletions

View file

@ -39,6 +39,7 @@ import {
import { isValidationTerminal } from "./state.js";
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from "node:fs";
import { atomicWriteSync } from "./atomic-write.js";
import { loadJsonFileOrNull } from "./json-persistence.js";
import { dirname, join } from "node:path";
// ─── Artifact Resolution & Verification ───────────────────────────────────────
@ -354,6 +355,10 @@ export function skipExecuteTask(
// ─── Disk-backed completed-unit helpers ───────────────────────────────────────
/** Type guard: true iff `data` is an array whose elements are all strings. */
function isStringArray(data: unknown): data is string[] {
  if (!Array.isArray(data)) return false;
  for (const item of data) {
    if (typeof item !== "string") return false;
  }
  return true;
}
/** Path to the persisted completed-unit keys file. */
export function completedKeysPath(base: string): string {
return join(base, ".gsd", "completed-units.json");
@ -362,12 +367,7 @@ export function completedKeysPath(base: string): string {
/** Write a completed unit key to disk (read-modify-write append to set). */
export function persistCompletedKey(base: string, key: string): void {
const file = completedKeysPath(base);
let keys: string[] = [];
try {
if (existsSync(file)) {
keys = JSON.parse(readFileSync(file, "utf-8"));
}
} catch (e) { /* corrupt file — start fresh */ void e; }
const keys = loadJsonFileOrNull(file, isStringArray) ?? [];
const keySet = new Set(keys);
if (!keySet.has(key)) {
keys.push(key);
@ -378,27 +378,21 @@ export function persistCompletedKey(base: string, key: string): void {
/** Remove a stale completed unit key from disk. */
export function removePersistedKey(base: string, key: string): void {
const file = completedKeysPath(base);
try {
if (existsSync(file)) {
const keys: string[] = JSON.parse(readFileSync(file, "utf-8"));
const filtered = keys.filter(k => k !== key);
// Only write if the key was actually present
if (filtered.length !== keys.length) {
atomicWriteSync(file, JSON.stringify(filtered));
}
}
} catch (e) { /* non-fatal: removePersistedKey failure */ void e; }
const keys = loadJsonFileOrNull(file, isStringArray);
if (!keys) return;
const filtered = keys.filter(k => k !== key);
if (filtered.length !== keys.length) {
atomicWriteSync(file, JSON.stringify(filtered));
}
}
/** Load all completed unit keys from disk into the in-memory set. */
export function loadPersistedKeys(base: string, target: Set<string>): void {
const file = completedKeysPath(base);
try {
if (existsSync(file)) {
const keys: string[] = JSON.parse(readFileSync(file, "utf-8"));
for (const k of keys) target.add(k);
}
} catch (e) { /* non-fatal: loadPersistedKeys failure */ void e; }
const keys = loadJsonFileOrNull(file, isStringArray);
if (keys) {
for (const k of keys) target.add(k);
}
}
// ─── Merge State Reconciliation ───────────────────────────────────────────────

View file

@ -11,6 +11,7 @@
*/
import { existsSync, mkdirSync, readFileSync, cpSync, unlinkSync, readdirSync } from "node:fs";
import { loadJsonFileOrNull } from "./json-persistence.js";
import { join, sep as pathSep } from "node:path";
import { homedir } from "node:os";
import { safeCopy, safeCopyRecursive } from "./safe-fs.js";
@ -112,15 +113,15 @@ export function syncStateToProjectRoot(worktreePath: string, projectRoot: string
* Uses gsdVersion instead of syncedAt so that launching a second session
* doesn't falsely trigger staleness (#804).
*/
/**
 * Type guard for the managed-resources manifest shape.
 * Accepts any object carrying a string `gsdVersion` property.
 * (The redundant non-null assertion on `data` is gone: after the explicit
 * null/typeof checks, TypeScript already narrows `data` to `object`.)
 */
function isManifestWithVersion(data: unknown): data is { gsdVersion: string } {
  if (data === null || typeof data !== "object") return false;
  return "gsdVersion" in data && typeof (data as Record<string, unknown>).gsdVersion === "string";
}
export function readResourceVersion(): string | null {
const agentDir = process.env.GSD_CODING_AGENT_DIR || join(homedir(), ".gsd", "agent");
const manifestPath = join(agentDir, "managed-resources.json");
try {
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
return typeof manifest?.gsdVersion === "string" ? manifest.gsdVersion : null;
} catch {
return null;
}
const manifest = loadJsonFileOrNull(manifestPath, isManifestWithVersion);
return manifest?.gsdVersion ?? null;
}
/**

View file

@ -14,6 +14,7 @@ import type { ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { existsSync, readdirSync, readFileSync, statSync, unlinkSync } from "node:fs";
import { join } from "node:path";
import { gsdRoot } from "./paths.js";
import { loadJsonFileOrNull } from "./json-persistence.js";
// ─── Types ──────────────────────────────────────────────────────────────────
@ -331,20 +332,18 @@ async function handleLogsList(basePath: string, ctx: ExtensionCommandContext): P
// Metrics summary
const metricsPath = join(gsdRoot(basePath), "metrics.json");
if (existsSync(metricsPath)) {
try {
const metrics = JSON.parse(readFileSync(metricsPath, "utf-8"));
const units = metrics?.units;
if (Array.isArray(units) && units.length > 0) {
const totalCost = units.reduce((sum: number, u: Record<string, unknown>) => sum + ((u.cost as number) ?? 0), 0);
const totalTokens = units.reduce((sum: number, u: Record<string, unknown>) => {
const t = u.tokens as Record<string, number> | undefined;
return sum + (t?.total ?? 0);
}, 0);
lines.push("");
lines.push(`Metrics: ${units.length} units tracked · $${totalCost.toFixed(2)} · ${(totalTokens / 1000).toFixed(0)}K tokens`);
}
} catch { /* ignore */ }
// Type guard for the metrics summary file: an object with a `units` array.
// No non-null assertion needed — the null/typeof checks already narrow `d`.
const isMetrics = (d: unknown): d is { units: Array<Record<string, unknown>> } =>
  d !== null && typeof d === "object" && "units" in d && Array.isArray((d as Record<string, unknown>).units);
const metrics = loadJsonFileOrNull(metricsPath, isMetrics);
if (metrics && metrics.units.length > 0) {
const units = metrics.units;
const totalCost = units.reduce((sum: number, u) => sum + ((u.cost as number) ?? 0), 0);
const totalTokens = units.reduce((sum: number, u) => {
const t = u.tokens as Record<string, number> | undefined;
return sum + (t?.total ?? 0);
}, 0);
lines.push("");
lines.push(`Metrics: ${units.length} units tracked · $${totalCost.toFixed(2)} · ${(totalTokens / 1000).toFixed(0)}K tokens`);
}
lines.push("");

View file

@ -1,4 +1,4 @@
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
import { existsSync, mkdirSync, readFileSync, renameSync, unlinkSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
/**
@ -50,3 +50,18 @@ export function saveJsonFile<T>(filePath: string, data: T): void {
// Non-fatal — don't let persistence failures break operation
}
}
/**
 * Write a JSON file atomically (write to a sibling `.tmp` file, then rename).
 * Creates parent directories as needed.
 *
 * Non-fatal on error: failures are swallowed so persistence problems never
 * break the caller's operation. On failure, any partially written `.tmp`
 * file is removed so no debris is left next to the target.
 *
 * NOTE(review): the fixed `.tmp` suffix means two concurrent writers to the
 * same path can race on the temp file — acceptable for single-process use;
 * confirm if multi-process writers are expected.
 *
 * @param filePath Destination path of the JSON file.
 * @param data     Value serialized with `JSON.stringify(data, null, 2)`.
 */
export function writeJsonFileAtomic<T>(filePath: string, data: T): void {
  const tmp = filePath + ".tmp";
  try {
    mkdirSync(dirname(filePath), { recursive: true });
    writeFileSync(tmp, JSON.stringify(data, null, 2), "utf-8");
    renameSync(tmp, filePath);
  } catch {
    // Non-fatal — but clean up the stale temp file if the write/rename failed.
    try { unlinkSync(tmp); } catch { /* ignore — tmp may not exist */ }
  }
}

View file

@ -9,10 +9,10 @@
* survives branch switches and is shared across sessions.
*/
import { readFileSync, writeFileSync, existsSync } from "node:fs";
import { join } from "node:path";
import { gsdRoot } from "./paths.js";
import { milestoneIdSort } from "./milestone-ids.js";
import { loadJsonFileOrNull, saveJsonFile } from "./json-persistence.js";
// ─── Types ───────────────────────────────────────────────────────────────────
@ -45,6 +45,12 @@ function queueOrderPath(basePath: string): string {
return join(gsdRoot(basePath), "QUEUE-ORDER.json");
}
// ─── Type Guards ─────────────────────────────────────────────────────────────
/**
 * Type guard for the persisted queue-order file shape: an object with an
 * array `order` property. Matches the style of the other guards in this
 * change (no redundant `data!` — the null/typeof checks already narrow).
 */
function isQueueOrderFile(data: unknown): data is QueueOrderFile {
  if (data === null || typeof data !== "object") return false;
  return "order" in data && Array.isArray((data as QueueOrderFile).order);
}
// ─── Read / Write ────────────────────────────────────────────────────────────
/**
@ -52,15 +58,8 @@ function queueOrderPath(basePath: string): string {
* the file is corrupt/unreadable.
*/
export function loadQueueOrder(basePath: string): string[] | null {
const p = queueOrderPath(basePath);
if (!existsSync(p)) return null;
try {
const data: QueueOrderFile = JSON.parse(readFileSync(p, "utf-8"));
if (!Array.isArray(data.order)) return null;
return data.order;
} catch {
return null;
}
const data = loadJsonFileOrNull(queueOrderPath(basePath), isQueueOrderFile);
return data?.order ?? null;
}
/**
@ -71,7 +70,7 @@ export function saveQueueOrder(basePath: string, order: string[]): void {
order,
updatedAt: new Date().toISOString(),
};
writeFileSync(queueOrderPath(basePath), JSON.stringify(data, null, 2) + "\n", "utf-8");
saveJsonFile(queueOrderPath(basePath), data);
}
// ─── Sorting ─────────────────────────────────────────────────────────────────

View file

@ -11,9 +11,6 @@
*/
import {
writeFileSync,
readFileSync,
renameSync,
unlinkSync,
readdirSync,
mkdirSync,
@ -21,6 +18,7 @@ import {
} from "node:fs";
import { join } from "node:path";
import { gsdRoot } from "./paths.js";
import { loadJsonFileOrNull, writeJsonFileAtomic } from "./json-persistence.js";
// ─── Types ─────────────────────────────────────────────────────────────────
@ -49,9 +47,16 @@ export interface SignalMessage {
const PARALLEL_DIR = "parallel";
const STATUS_SUFFIX = ".status.json";
const SIGNAL_SUFFIX = ".signal.json";
const TMP_SUFFIX = ".tmp";
const DEFAULT_STALE_TIMEOUT_MS = 30_000;
/** Type guard: minimally validates a parsed SessionStatus payload (presence of `milestoneId` and `pid`). */
function isSessionStatus(data: unknown): data is SessionStatus {
  if (data === null || typeof data !== "object") return false;
  return "milestoneId" in data && "pid" in data;
}
/** Type guard: minimally validates a parsed SignalMessage payload (presence of `signal` and `sentAt`). */
function isSignalMessage(data: unknown): data is SignalMessage {
  if (data === null || typeof data !== "object") return false;
  return "signal" in data && "sentAt" in data;
}
// ─── Helpers ───────────────────────────────────────────────────────────────
function parallelDir(basePath: string): string {
@ -86,25 +91,13 @@ function isPidAlive(pid: number): boolean {
/** Write session status atomically (write to .tmp, then rename). */
export function writeSessionStatus(basePath: string, status: SessionStatus): void {
try {
ensureParallelDir(basePath);
const dest = statusPath(basePath, status.milestoneId);
const tmp = dest + TMP_SUFFIX;
writeFileSync(tmp, JSON.stringify(status, null, 2), "utf-8");
renameSync(tmp, dest);
} catch { /* non-fatal */ }
ensureParallelDir(basePath);
writeJsonFileAtomic(statusPath(basePath, status.milestoneId), status);
}
/** Read a specific milestone's session status. */
export function readSessionStatus(basePath: string, milestoneId: string): SessionStatus | null {
try {
const p = statusPath(basePath, milestoneId);
if (!existsSync(p)) return null;
const raw = readFileSync(p, "utf-8");
return JSON.parse(raw) as SessionStatus;
} catch {
return null;
}
return loadJsonFileOrNull(statusPath(basePath, milestoneId), isSessionStatus);
}
/** Read all session status files from .gsd/parallel/. */
@ -114,13 +107,10 @@ export function readAllSessionStatuses(basePath: string): SessionStatus[] {
const results: SessionStatus[] = [];
try {
const entries = readdirSync(dir);
for (const entry of entries) {
for (const entry of readdirSync(dir)) {
if (!entry.endsWith(STATUS_SUFFIX)) continue;
try {
const raw = readFileSync(join(dir, entry), "utf-8");
results.push(JSON.parse(raw) as SessionStatus);
} catch { /* skip corrupt files */ }
const status = loadJsonFileOrNull(join(dir, entry), isSessionStatus);
if (status) results.push(status);
}
} catch { /* non-fatal */ }
return results;
@ -138,27 +128,19 @@ export function removeSessionStatus(basePath: string, milestoneId: string): void
/** Write a signal file for a worker to consume. */
export function sendSignal(basePath: string, milestoneId: string, signal: SessionSignal): void {
try {
ensureParallelDir(basePath);
const dest = signalPath(basePath, milestoneId);
const tmp = dest + TMP_SUFFIX;
const msg: SignalMessage = { signal, sentAt: Date.now(), from: "coordinator" };
writeFileSync(tmp, JSON.stringify(msg, null, 2), "utf-8");
renameSync(tmp, dest);
} catch { /* non-fatal */ }
ensureParallelDir(basePath);
const msg: SignalMessage = { signal, sentAt: Date.now(), from: "coordinator" };
writeJsonFileAtomic(signalPath(basePath, milestoneId), msg);
}
/** Read and delete a signal file (atomic consume). Returns null if no signal pending. */
export function consumeSignal(basePath: string, milestoneId: string): SignalMessage | null {
try {
const p = signalPath(basePath, milestoneId);
if (!existsSync(p)) return null;
const raw = readFileSync(p, "utf-8");
unlinkSync(p);
return JSON.parse(raw) as SignalMessage;
} catch {
return null;
const p = signalPath(basePath, milestoneId);
const msg = loadJsonFileOrNull(p, isSignalMessage);
if (msg) {
try { unlinkSync(p); } catch { /* non-fatal */ }
}
return msg;
}
// ─── Stale Detection ───────────────────────────────────────────────────────