refactor: extract shared JSON persistence utility, migrate metrics + routing-history + unit-runtime (#1206)

Eliminates repeated try/catch JSON file load/save boilerplate across three
modules by introducing loadJsonFile, loadJsonFileOrNull, and saveJsonFile
in a shared json-persistence.ts utility.

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
TÂCHES 2026-03-18 11:25:32 -06:00 committed by GitHub
parent ba6a7d5ee9
commit f824bd2007
4 changed files with 96 additions and 59 deletions

View file

@ -0,0 +1,52 @@
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
import { dirname } from "node:path";
/**
 * Load a JSON file with validation, returning a default on failure.
 * Missing files, corrupt JSON, and schema mismatches are all treated
 * uniformly: the caller gets a fresh default instead of an error.
 *
 * @param filePath       absolute or relative path to the JSON file
 * @param validate       type guard confirming the parsed shape
 * @param defaultFactory produces the fallback value on any failure
 */
export function loadJsonFile<T>(
  filePath: string,
  validate: (data: unknown) => data is T,
  defaultFactory: () => T,
): T {
  try {
    if (!existsSync(filePath)) return defaultFactory();
    const contents = readFileSync(filePath, "utf-8");
    const candidate: unknown = JSON.parse(contents);
    if (validate(candidate)) return candidate;
  } catch {
    // Unreadable file or malformed JSON — fall through to the default.
  }
  return defaultFactory();
}
/**
 * Load a JSON file with validation, returning null on failure.
 * For callers that distinguish "no data" from "default data".
 *
 * @param filePath path to the JSON file
 * @param validate type guard confirming the parsed shape
 * @returns the validated value, or null when the file is missing,
 *          unreadable, malformed, or fails the shape check
 */
export function loadJsonFileOrNull<T>(
  filePath: string,
  validate: (data: unknown) => data is T,
): T | null {
  if (!existsSync(filePath)) return null;
  try {
    const candidate: unknown = JSON.parse(readFileSync(filePath, "utf-8"));
    return validate(candidate) ? candidate : null;
  } catch {
    // Read error or corrupt JSON maps to "no data".
    return null;
  }
}
/**
 * Save a JSON file, creating parent directories as needed.
 * Non-fatal: errors are swallowed so persistence failures never
 * break the calling operation.
 *
 * @param filePath destination path; parent directories are created
 * @param data     value serialized as pretty-printed JSON + trailing newline
 */
export function saveJsonFile<T>(filePath: string, data: T): void {
  try {
    mkdirSync(dirname(filePath), { recursive: true });
    const serialized = JSON.stringify(data, null, 2) + "\n";
    writeFileSync(filePath, serialized, "utf-8");
  } catch {
    // Non-fatal — don't let persistence failures break operation
  }
}

View file

@ -13,11 +13,11 @@
* 4. On crash recovery or fresh start, the ledger is loaded from disk
*/
import { readFileSync, writeFileSync, mkdirSync } from "node:fs";
import { join } from "node:path";
import type { ExtensionContext } from "@gsd/pi-coding-agent";
import { gsdRoot } from "./paths.js";
import { getAndClearSkills } from "./skill-telemetry.js";
import { loadJsonFile, loadJsonFileOrNull, saveJsonFile } from "./json-persistence.js";
// Re-export from shared — canonical implementation lives in format-utils.
export { formatTokenCount } from "../shared/mod.js";
@ -502,45 +502,31 @@ function metricsPath(base: string): string {
return join(gsdRoot(base), "metrics.json");
}
// Type guard: a MetricsLedger is a non-null v1 object carrying an array of units.
function isMetricsLedger(data: unknown): data is MetricsLedger {
  if (typeof data !== "object" || data === null) return false;
  const candidate = data as MetricsLedger;
  return candidate.version === 1 && Array.isArray(candidate.units);
}
// Fresh, empty v1 ledger stamped with the current time as the project start.
function defaultLedger(): MetricsLedger {
  const startedAt = Date.now();
  return { version: 1, projectStartedAt: startedAt, units: [] };
}
/**
 * Load ledger from disk without initializing in-memory state.
 * Used by history/export commands outside of auto-mode.
 *
 * @returns the persisted ledger, or null when the file is missing,
 *          corrupt, or fails the MetricsLedger shape check.
 */
export function loadLedgerFromDisk(base: string): MetricsLedger | null {
  return loadJsonFileOrNull(metricsPath(base), isMetricsLedger);
}
/**
 * Load the ledger for auto-mode, falling back to a fresh empty ledger
 * when the file is missing, corrupt, or fails validation.
 */
function loadLedger(base: string): MetricsLedger {
  return loadJsonFile(metricsPath(base), isMetricsLedger, defaultLedger);
}
/**
 * Persist the ledger to disk. Non-fatal: saveJsonFile swallows write
 * errors so metrics persistence failures never break auto-mode.
 */
function saveLedger(base: string, data: MetricsLedger): void {
  saveJsonFile(metricsPath(base), data);
}

View file

@ -2,10 +2,10 @@
// Tracks success/failure per tier per unit-type pattern to improve
// classification accuracy over time.
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
import { join } from "node:path";
import { gsdRoot } from "./paths.js";
import type { ComplexityTier } from "./types.js";
import { loadJsonFile, saveJsonFile } from "./json-persistence.js";
// ─── Types ───────────────────────────────────────────────────────────────────
@ -267,24 +267,20 @@ function historyPath(base: string): string {
return join(gsdRoot(base), HISTORY_FILE);
}
// Type guard: RoutingHistoryData is a non-null v1 object with a non-null patterns map.
function isRoutingHistoryData(data: unknown): data is RoutingHistoryData {
  if (typeof data !== "object" || data === null) return false;
  const candidate = data as RoutingHistoryData;
  if (candidate.version !== 1) return false;
  return typeof candidate.patterns === "object" && candidate.patterns !== null;
}
/**
 * Load routing history, starting fresh with an empty history when the
 * file is missing, corrupt, or fails the RoutingHistoryData shape check.
 */
function loadHistory(base: string): RoutingHistoryData {
  return loadJsonFile(historyPath(base), isRoutingHistoryData, createEmptyHistory);
}
/**
 * Persist routing history to disk. Non-fatal: saveJsonFile swallows
 * write errors so history failures never break auto-mode.
 */
function saveHistory(base: string, data: RoutingHistoryData): void {
  saveJsonFile(historyPath(base), data);
}

View file

@ -1,4 +1,4 @@
import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync, unlinkSync } from "node:fs";
import { existsSync, readdirSync, readFileSync, unlinkSync } from "node:fs";
import { join } from "node:path";
import {
gsdRoot,
@ -8,6 +8,7 @@ import {
resolveTaskFile,
} from "./paths.js";
import { loadFile, parseTaskPlanMustHaves, countMustHavesMentionedInSummary } from "./files.js";
import { loadJsonFileOrNull, saveJsonFile } from "./json-persistence.js";
export type UnitRuntimePhase =
| "dispatched"
@ -46,6 +47,16 @@ export interface AutoUnitRuntimeRecord {
lastRecoveryReason?: "idle" | "hard";
}
// Type guard: a v1 runtime record must carry string unitType and unitId fields.
function isAutoUnitRuntimeRecord(data: unknown): data is AutoUnitRuntimeRecord {
  if (typeof data !== "object" || data === null) return false;
  const record = data as AutoUnitRuntimeRecord;
  if (record.version !== 1) return false;
  return typeof record.unitType === "string" && typeof record.unitId === "string";
}
// Directory under the GSD root where per-unit runtime records live.
function runtimeDir(basePath: string): string {
  const root = gsdRoot(basePath);
  return join(root, "runtime", "units");
}
@ -63,8 +74,6 @@ export function writeUnitRuntimeRecord(
startedAt: number,
updates: Partial<AutoUnitRuntimeRecord> = {},
): AutoUnitRuntimeRecord {
const dir = runtimeDir(basePath);
mkdirSync(dir, { recursive: true });
const path = runtimePath(basePath, unitType, unitId);
const prev = readUnitRuntimeRecord(basePath, unitType, unitId);
const next: AutoUnitRuntimeRecord = {
@ -84,18 +93,12 @@ export function writeUnitRuntimeRecord(
recoveryAttempts: updates.recoveryAttempts ?? prev?.recoveryAttempts ?? 0,
lastRecoveryReason: updates.lastRecoveryReason ?? prev?.lastRecoveryReason,
};
writeFileSync(path, JSON.stringify(next, null, 2) + "\n", "utf-8");
saveJsonFile(path, next);
return next;
}
/**
 * Read a unit's persisted runtime record.
 *
 * @returns the record, or null when the file is missing, corrupt, or
 *          fails the AutoUnitRuntimeRecord shape check.
 */
export function readUnitRuntimeRecord(basePath: string, unitType: string, unitId: string): AutoUnitRuntimeRecord | null {
  return loadJsonFileOrNull(runtimePath(basePath, unitType, unitId), isAutoUnitRuntimeRecord);
}
export function clearUnitRuntimeRecord(basePath: string, unitType: string, unitId: string): void {