feat(cleanup): add ~/.gsd/projects/ orphan detection and pruning (#1686)
* fix(worktree): recurse into tasks/ when syncing slice artifacts back to project root (#1678) syncWorktreeStateBack() only processed files directly in each slice directory, silently skipping the tasks/ subdirectory. Task-level summaries (T01-SUMMARY.md, T02-SUMMARY.md, etc.) were therefore never copied from the worktree back to the project root before teardown, causing data loss when the worktree was removed on milestone completion. Fix: detect the tasks/ directory entry in the inner loop and recurse into it, copying all .md files and appending them to the synced list. Consistent with how syncStateToProjectRoot() already uses recursive copy via safeCopyRecursive(). Adds regression test (case 8 in worktree-sync-milestones.test.ts) covering slice-level and task-level summary sync. * feat(cleanup): add ~/.gsd/projects/ orphan detection and pruning Introduces a complete lifecycle management story for the external project state directory (~/.gsd/projects/<hash>/). Previously these directories accumulated indefinitely with no mechanism to identify or remove them after a repo was deleted or moved. Changes: repo-identity.ts - Write `repo-meta.json` into each external state dir on first open (and backfill on any subsequent open if the file is missing). - Records: version, hash (dir name), gitRoot, remoteUrl, createdAt. - Non-fatal: metadata write failure never blocks project setup. - Export `readRepoMeta()` and `RepoMeta` interface for consumers. doctor-types.ts - Add `orphaned_project_state` to DoctorIssueCode. - Add `GLOBAL_STATE_CODES` set — codes that must never be auto-fixed at fixLevel=task (post-task automated health checks must not delete project state directories). doctor-checks.ts - Add `checkGlobalHealth()` — scans ~/.gsd/projects/, reads repo-meta.json from each dir, reports info-severity issue for any whose gitRoot is gone. - Auto-fixable with --fix; skipped entirely at fixLevel=task. doctor.ts - Import and call `checkGlobalHealth` after `checkRuntimeHealth`. 
- Gate on `GLOBAL_STATE_CODES` in `shouldFix` at task fixLevel. commands-maintenance.ts - Add `handleCleanupProjects(args, ctx)` — interactive audit command. - Categorises dirs as active / orphaned / unknown (no metadata yet). - Without --fix: prints full report with per-dir gitRoot + remoteUrl. - With --fix: deletes orphaned dirs, reports removed/failed counts. commands/handlers/ops.ts - Route `cleanup projects` and `cleanup projects --fix` to handler. commands/catalog.ts - Add `projects` and `projects --fix` to cleanup tab-completions. * feat(cleanup): add metrics.json bloat detection and pruning The metrics ledger has no TTL and grows by one entry per completed unit — ~1-2 KB/entry with no ceiling. On a busy project (50 units/day) this reaches 4-9 MB in 90 days and continues growing indefinitely. Changes: metrics.ts - Add pruneMetricsLedger(base, keepCount): trims oldest entries from the head of the units array, keeping the newest `keepCount`. Updates both the on-disk file and the in-memory ledger if a session is active. doctor-types.ts - Add "metrics_ledger_bloat" to DoctorIssueCode. doctor-checks.ts (checkRuntimeHealth) - Add metrics ledger bloat check after the existing integrity check. - Threshold: 2000 units / fires as "warning". - Fix: prune to newest 1500 entries via pruneMetricsLedger(). - Reports both the unit count and file size in MB in the issue message. * fix cleanup project-state path and repo-meta refresh
This commit is contained in:
parent
98530fad11
commit
ea2118d794
11 changed files with 496 additions and 9 deletions
|
|
@ -300,6 +300,31 @@ export function syncWorktreeStateBack(
|
|||
} catch {
|
||||
/* non-fatal */
|
||||
}
|
||||
} else if (fileEntry.isDirectory() && fileEntry.name === "tasks") {
|
||||
// Recurse into tasks/ to sync task-level summaries (#1678)
|
||||
const wtTasksDir = join(wtSliceDir, "tasks");
|
||||
const mainTasksDir = join(mainSliceDir, "tasks");
|
||||
try {
|
||||
mkdirSync(mainTasksDir, { recursive: true });
|
||||
for (const taskEntry of readdirSync(wtTasksDir, {
|
||||
withFileTypes: true,
|
||||
})) {
|
||||
if (taskEntry.isFile() && taskEntry.name.endsWith(".md")) {
|
||||
const src = join(wtTasksDir, taskEntry.name);
|
||||
const dst = join(mainTasksDir, taskEntry.name);
|
||||
try {
|
||||
cpSync(src, dst, { force: true });
|
||||
synced.push(
|
||||
`milestones/${milestoneId}/slices/${sid}/tasks/${taskEntry.name}`,
|
||||
);
|
||||
} catch {
|
||||
/* non-fatal */
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
/* non-fatal */
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -204,3 +204,119 @@ export async function handleDryRun(ctx: ExtensionCommandContext, basePath: strin
|
|||
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
export async function handleCleanupProjects(args: string, ctx: ExtensionCommandContext): Promise<void> {
|
||||
const { readdirSync, existsSync: fsExists, rmSync: fsRmSync } = await import("node:fs");
|
||||
const { join: pathJoin } = await import("node:path");
|
||||
const { readRepoMeta, externalProjectsRoot } = await import("./repo-identity.js");
|
||||
|
||||
const fix = args.includes("--fix");
|
||||
const projectsDir = externalProjectsRoot();
|
||||
|
||||
if (!fsExists(projectsDir)) {
|
||||
ctx.ui.notify(`No project-state directory found at ${projectsDir} — nothing to clean up.`, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
let hashList: string[];
|
||||
try {
|
||||
hashList = readdirSync(projectsDir, { withFileTypes: true })
|
||||
.filter(e => e.isDirectory())
|
||||
.map(e => e.name);
|
||||
} catch {
|
||||
ctx.ui.notify(`Failed to read project-state directory at ${projectsDir}.`, "error");
|
||||
return;
|
||||
}
|
||||
|
||||
if (hashList.length === 0) {
|
||||
ctx.ui.notify(`Project-state directory is empty (${projectsDir}) — nothing to clean up.`, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
type ProjectEntry = { hash: string; gitRoot: string; remoteUrl: string };
|
||||
const active: ProjectEntry[] = [];
|
||||
const orphaned: ProjectEntry[] = [];
|
||||
const unknown: string[] = [];
|
||||
|
||||
for (const hash of hashList) {
|
||||
const dirPath = pathJoin(projectsDir, hash);
|
||||
const meta = readRepoMeta(dirPath);
|
||||
if (!meta) {
|
||||
unknown.push(hash);
|
||||
continue;
|
||||
}
|
||||
const entry: ProjectEntry = { hash, gitRoot: meta.gitRoot, remoteUrl: meta.remoteUrl };
|
||||
if (fsExists(meta.gitRoot)) {
|
||||
active.push(entry);
|
||||
} else {
|
||||
orphaned.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
const pl = (n: number, word: string) => `${n} ${word}${n === 1 ? "" : "s"}`;
|
||||
const lines: string[] = [
|
||||
`${projectsDir} ${pl(hashList.length, "project state director")}${hashList.length === 1 ? "y" : "ies"}`,
|
||||
"",
|
||||
];
|
||||
|
||||
if (active.length > 0) {
|
||||
lines.push(`Active (${active.length}) — git root present on disk:`);
|
||||
for (const e of active) {
|
||||
const remote = e.remoteUrl ? ` [${e.remoteUrl}]` : "";
|
||||
lines.push(` + ${e.hash} ${e.gitRoot}${remote}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (orphaned.length > 0) {
|
||||
lines.push(`Orphaned (${orphaned.length}) — git root no longer exists:`);
|
||||
for (const e of orphaned) {
|
||||
const remote = e.remoteUrl ? ` [${e.remoteUrl}]` : "";
|
||||
lines.push(` - ${e.hash} ${e.gitRoot}${remote}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (unknown.length > 0) {
|
||||
lines.push(`Unknown (${unknown.length}) — no metadata yet:`);
|
||||
for (const h of unknown) {
|
||||
lines.push(` ? ${h} (open that project in GSD once to register metadata)`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (orphaned.length === 0) {
|
||||
lines.push("No orphaned project state — all tracked repos are still present on disk.");
|
||||
if (!fix) {
|
||||
ctx.ui.notify(lines.join("\n"), "success");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!fix && orphaned.length > 0) {
|
||||
lines.push(`Run /gsd cleanup projects --fix to permanently delete ${pl(orphaned.length, "orphaned director")}${orphaned.length === 1 ? "y" : "ies"}.`);
|
||||
ctx.ui.notify(lines.join("\n"), "warning");
|
||||
return;
|
||||
}
|
||||
|
||||
if (fix && orphaned.length > 0) {
|
||||
let removed = 0;
|
||||
const failed: string[] = [];
|
||||
for (const e of orphaned) {
|
||||
try {
|
||||
fsRmSync(pathJoin(projectsDir, e.hash), { recursive: true, force: true });
|
||||
removed++;
|
||||
} catch {
|
||||
failed.push(e.hash);
|
||||
}
|
||||
}
|
||||
lines.push(`Removed ${pl(removed, "orphaned director")}${removed === 1 ? "y" : "ies"}.`);
|
||||
if (failed.length > 0) {
|
||||
lines.push(`Failed to remove: ${failed.join(", ")}`);
|
||||
}
|
||||
ctx.ui.notify(lines.join("\n"), removed > 0 ? "success" : "warning");
|
||||
return;
|
||||
}
|
||||
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -138,6 +138,8 @@ const NESTED_COMPLETIONS: CompletionMap = {
|
|||
cleanup: [
|
||||
{ cmd: "branches", desc: "Remove merged milestone branches" },
|
||||
{ cmd: "snapshots", desc: "Remove old execution snapshots" },
|
||||
{ cmd: "projects", desc: "Audit orphaned ~/.gsd/projects/ state directories" },
|
||||
{ cmd: "projects --fix", desc: "Delete orphaned project state directories (cannot be undone)" },
|
||||
],
|
||||
knowledge: [
|
||||
{ cmd: "rule", desc: "Add a project rule (always/never do X)" },
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ import { handleConfig } from "../../commands-config.js";
|
|||
import { handleDoctor, handleCapture, handleKnowledge, handleRunHook, handleSkillHealth, handleSteer, handleTriage, handleUpdate } from "../../commands-handlers.js";
|
||||
import { handleInspect } from "../../commands-inspect.js";
|
||||
import { handleLogs } from "../../commands-logs.js";
|
||||
import { handleCleanupBranches, handleCleanupSnapshots, handleSkip } from "../../commands-maintenance.js";
|
||||
import { handleCleanupBranches, handleCleanupSnapshots, handleSkip, handleCleanupProjects } from "../../commands-maintenance.js";
|
||||
import { handleExport } from "../../export.js";
|
||||
import { handleHistory } from "../../history.js";
|
||||
import { handleUndo } from "../../undo.js";
|
||||
|
|
@ -65,6 +65,10 @@ export async function handleOpsCommand(trimmed: string, ctx: ExtensionCommandCon
|
|||
await handleExport(trimmed.replace(/^export\s*/, "").trim(), ctx, projectRoot());
|
||||
return true;
|
||||
}
|
||||
if (trimmed === "cleanup projects" || trimmed.startsWith("cleanup projects ")) {
|
||||
await handleCleanupProjects(trimmed.replace(/^cleanup projects\s*/, "").trim(), ctx);
|
||||
return true;
|
||||
}
|
||||
if (trimmed === "cleanup") {
|
||||
await handleCleanupBranches(ctx, projectRoot());
|
||||
await handleCleanupSnapshots(ctx, projectRoot());
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import { existsSync, lstatSync, readdirSync, readFileSync, realpathSync, rmSync,
|
|||
import { basename, dirname, join, sep } from "node:path";
|
||||
|
||||
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
|
||||
import { readRepoMeta, externalProjectsRoot } from "./repo-identity.js";
|
||||
import { loadFile, parseRoadmap } from "./files.js";
|
||||
import { resolveMilestoneFile, milestonesDir, gsdRoot, resolveGsdRootFile, relGsdRootFile } from "./paths.js";
|
||||
import { deriveState, isMilestoneComplete } from "./state.js";
|
||||
|
|
@ -692,6 +693,42 @@ export async function checkRuntimeHealth(
|
|||
// Non-fatal — metrics check failed
|
||||
}
|
||||
|
||||
// ── Metrics ledger bloat ──────────────────────────────────────────────
|
||||
// The metrics ledger has no TTL and grows by one entry per completed unit.
|
||||
// At 50 units/day a project can accumulate tens of thousands of entries over
|
||||
// months of use. Prune to the newest 1500 when the threshold is exceeded.
|
||||
try {
|
||||
const metricsFilePath = join(root, "metrics.json");
|
||||
if (existsSync(metricsFilePath)) {
|
||||
try {
|
||||
const raw = readFileSync(metricsFilePath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
const BLOAT_UNITS_THRESHOLD = 2000;
|
||||
if (parsed.version === 1 && Array.isArray(parsed.units) && parsed.units.length > BLOAT_UNITS_THRESHOLD) {
|
||||
const fileSizeMB = (statSync(metricsFilePath).size / (1024 * 1024)).toFixed(1);
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
code: "metrics_ledger_bloat",
|
||||
scope: "project",
|
||||
unitId: "project",
|
||||
message: `metrics.json has ${parsed.units.length} unit entries (${fileSizeMB}MB) — threshold is ${BLOAT_UNITS_THRESHOLD}. Run /gsd doctor --fix to prune to the newest 1500 entries.`,
|
||||
file: ".gsd/metrics.json",
|
||||
fixable: true,
|
||||
});
|
||||
if (shouldFix("metrics_ledger_bloat")) {
|
||||
const { pruneMetricsLedger } = await import("./metrics.js");
|
||||
const removed = pruneMetricsLedger(basePath, 1500);
|
||||
fixesApplied.push(`pruned metrics ledger: removed ${removed} oldest entries (${parsed.units.length - removed} remain)`);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// JSON parse failed — already handled by the integrity check above
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — metrics bloat check failed
|
||||
}
|
||||
|
||||
// ── Large planning file detection ──────────────────────────────────────
|
||||
// Files over 100KB can cause LLM context pressure. Report the worst offenders.
|
||||
try {
|
||||
|
|
@ -786,3 +823,85 @@ function buildStateMarkdownForCheck(state: Awaited<ReturnType<typeof deriveState
|
|||
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
// ── Global Health Checks ────────────────────────────────────────────────────
|
||||
// Cross-project checks that scan ~/.gsd/ rather than a specific project directory.
|
||||
|
||||
/**
|
||||
* Check for orphaned project state directories in ~/.gsd/projects/.
|
||||
*
|
||||
* A project directory is orphaned when its recorded gitRoot no longer exists
|
||||
* on disk — the repo was deleted, moved, or the external drive was unmounted.
|
||||
* These directories accumulate silently and waste disk space.
|
||||
*
|
||||
* Severity: info — orphaned state is harmless but takes disk space.
|
||||
* Fixable: yes — rmSync the directory. Never auto-fixed at fixLevel="task".
|
||||
*/
|
||||
export async function checkGlobalHealth(
|
||||
issues: DoctorIssue[],
|
||||
fixesApplied: string[],
|
||||
shouldFix: (code: DoctorIssueCode) => boolean,
|
||||
): Promise<void> {
|
||||
try {
|
||||
const projectsDir = externalProjectsRoot();
|
||||
|
||||
if (!existsSync(projectsDir)) return;
|
||||
|
||||
let entries: string[];
|
||||
try {
|
||||
entries = readdirSync(projectsDir, { withFileTypes: true })
|
||||
.filter(e => e.isDirectory())
|
||||
.map(e => e.name);
|
||||
} catch {
|
||||
return; // Can't read directory — skip
|
||||
}
|
||||
|
||||
if (entries.length === 0) return;
|
||||
|
||||
const orphaned: Array<{ hash: string; gitRoot: string; remoteUrl: string }> = [];
|
||||
let unknownCount = 0;
|
||||
|
||||
for (const hash of entries) {
|
||||
const dirPath = join(projectsDir, hash);
|
||||
const meta = readRepoMeta(dirPath);
|
||||
if (!meta) {
|
||||
unknownCount++;
|
||||
continue;
|
||||
}
|
||||
if (!existsSync(meta.gitRoot)) {
|
||||
orphaned.push({ hash, gitRoot: meta.gitRoot, remoteUrl: meta.remoteUrl });
|
||||
}
|
||||
}
|
||||
|
||||
if (orphaned.length === 0) return;
|
||||
|
||||
const labels = orphaned.slice(0, 3).map(o => o.gitRoot).join(", ");
|
||||
const overflow = orphaned.length > 3 ? ` (+${orphaned.length - 3} more)` : "";
|
||||
const unknownNote = unknownCount > 0 ? ` — ${unknownCount} additional director${unknownCount === 1 ? "y" : "ies"} have no metadata yet (open those repos once to register them)` : "";
|
||||
|
||||
issues.push({
|
||||
severity: "info",
|
||||
code: "orphaned_project_state",
|
||||
scope: "project",
|
||||
unitId: "global",
|
||||
message: `${orphaned.length} orphaned GSD project state director${orphaned.length === 1 ? "y" : "ies"} in ${projectsDir} whose git root no longer exists: ${labels}${overflow}${unknownNote}. Run /gsd cleanup projects to audit or /gsd cleanup projects --fix to reclaim disk space.`,
|
||||
file: projectsDir,
|
||||
fixable: true,
|
||||
});
|
||||
|
||||
if (shouldFix("orphaned_project_state")) {
|
||||
let removed = 0;
|
||||
for (const { hash } of orphaned) {
|
||||
try {
|
||||
rmSync(join(projectsDir, hash), { recursive: true, force: true });
|
||||
removed++;
|
||||
} catch {
|
||||
// Individual removal failure is non-fatal — continue with remaining
|
||||
}
|
||||
}
|
||||
fixesApplied.push(`removed ${removed} orphaned project state director${removed === 1 ? "y" : "ies"} from ${projectsDir}`);
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — global health check must not block per-project doctor
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -62,6 +62,8 @@ export type DoctorIssueCode =
|
|||
| "stale_replan_file"
|
||||
| "future_timestamp"
|
||||
// Runtime data integrity
|
||||
| "orphaned_project_state"
|
||||
| "metrics_ledger_bloat"
|
||||
| "metrics_ledger_corrupt"
|
||||
| "large_planning_file"
|
||||
// Slow environment checks (opt-in via --build / --test flags)
|
||||
|
|
@ -81,6 +83,15 @@ export const COMPLETION_TRANSITION_CODES = new Set<DoctorIssueCode>([
|
|||
"all_tasks_done_roadmap_not_checked",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Issue codes that represent global (cross-project) state.
|
||||
* These must NOT be auto-fixed when fixLevel is "task" — automated
|
||||
* post-task health checks must never delete external project state directories.
|
||||
*/
|
||||
export const GLOBAL_STATE_CODES = new Set<DoctorIssueCode>([
|
||||
"orphaned_project_state",
|
||||
]);
|
||||
|
||||
export interface DoctorIssue {
|
||||
severity: DoctorSeverity;
|
||||
code: DoctorIssueCode;
|
||||
|
|
|
|||
|
|
@ -8,9 +8,9 @@ import { invalidateAllCaches } from "./cache.js";
|
|||
import { loadEffectiveGSDPreferences, type GSDPreferences } from "./preferences.js";
|
||||
|
||||
import type { DoctorIssue, DoctorIssueCode, DoctorReport } from "./doctor-types.js";
|
||||
import { COMPLETION_TRANSITION_CODES } from "./doctor-types.js";
|
||||
import { COMPLETION_TRANSITION_CODES, GLOBAL_STATE_CODES } from "./doctor-types.js";
|
||||
import type { RoadmapSliceEntry } from "./types.js";
|
||||
import { checkGitHealth, checkRuntimeHealth } from "./doctor-checks.js";
|
||||
import { checkGitHealth, checkRuntimeHealth, checkGlobalHealth } from "./doctor-checks.js";
|
||||
import { checkEnvironmentHealth } from "./doctor-environment.js";
|
||||
import { runProviderChecks } from "./doctor-providers.js";
|
||||
|
||||
|
|
@ -476,6 +476,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
// Decide whether an issue with the given code may be auto-fixed in this run.
// Closes over the run options: `fix`, `dryRun`, and `fixLevel`.
const shouldFix = (code: DoctorIssueCode): boolean => {
  // Never fix unless --fix was requested, and never during a dry run.
  if (!fix || dryRun) return false;
  // At fixLevel "task" (automated post-task health checks), completion
  // transitions must not be auto-applied.
  if (fixLevel === "task" && COMPLETION_TRANSITION_CODES.has(code)) return false;
  // Likewise, global cross-project state (e.g. orphaned project state dirs)
  // must never be deleted by an automated post-task check.
  if (fixLevel === "task" && GLOBAL_STATE_CODES.has(code)) return false;
  return true;
};
|
||||
|
||||
|
|
@ -515,6 +516,9 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
await checkRuntimeHealth(basePath, issues, fixesApplied, shouldFix);
|
||||
const runtimeMs = Date.now() - t0runtime;
|
||||
|
||||
// Global health checks — cross-project state (e.g. orphaned project state dirs)
|
||||
await checkGlobalHealth(issues, fixesApplied, shouldFix);
|
||||
|
||||
// Environment health checks — timed
|
||||
const t0env = Date.now();
|
||||
await checkEnvironmentHealth(basePath, issues, {
|
||||
|
|
|
|||
|
|
@ -517,6 +517,31 @@ function defaultLedger(): MetricsLedger {
|
|||
return { version: 1, projectStartedAt: Date.now(), units: [] };
|
||||
}
|
||||
|
||||
/**
|
||||
* Prune the metrics ledger to at most `keepCount` most-recent unit entries.
|
||||
*
|
||||
* Called by the doctor when the ledger exceeds the bloat threshold.
|
||||
* Keeps the newest entries (highest index = most recent) and discards
|
||||
* the oldest from the head of the array. Preserves `projectStartedAt`.
|
||||
*
|
||||
* Updates both the on-disk file and the in-memory ledger if it is loaded,
|
||||
* so the current session sees the pruned state immediately.
|
||||
*
|
||||
* @returns the number of entries removed, or 0 if no pruning was needed.
|
||||
*/
|
||||
export function pruneMetricsLedger(base: string, keepCount: number): number {
|
||||
const disk = loadLedgerFromDisk(base);
|
||||
if (!disk || disk.units.length <= keepCount) return 0;
|
||||
const removed = disk.units.length - keepCount;
|
||||
disk.units = disk.units.slice(-keepCount);
|
||||
saveJsonFile(metricsPath(base), disk);
|
||||
// Keep the in-memory ledger in sync if it is loaded for this session.
|
||||
if (ledger) {
|
||||
ledger.units = ledger.units.slice(-keepCount);
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load ledger from disk without initializing in-memory state.
|
||||
* Used by history/export commands outside of auto-mode.
|
||||
|
|
|
|||
|
|
@ -8,12 +8,93 @@
|
|||
|
||||
import { createHash } from "node:crypto";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { existsSync, lstatSync, mkdirSync, readFileSync, realpathSync, rmSync, symlinkSync } from "node:fs";
|
||||
import { existsSync, lstatSync, mkdirSync, readFileSync, realpathSync, rmSync, symlinkSync, writeFileSync } from "node:fs";
|
||||
import { homedir } from "node:os";
|
||||
import { join, resolve } from "node:path";
|
||||
import { basename, join, resolve } from "node:path";
|
||||
|
||||
const gsdHome = process.env.GSD_HOME || join(homedir(), ".gsd");
|
||||
|
||||
// ─── Repo Metadata ───────────────────────────────────────────────────────────
|
||||
|
||||
export interface RepoMeta {
|
||||
version: number;
|
||||
hash: string;
|
||||
gitRoot: string;
|
||||
remoteUrl: string;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
function isRepoMeta(value: unknown): value is RepoMeta {
|
||||
if (!value || typeof value !== "object") return false;
|
||||
const v = value as Record<string, unknown>;
|
||||
return typeof v.version === "number"
|
||||
&& typeof v.hash === "string"
|
||||
&& typeof v.gitRoot === "string"
|
||||
&& typeof v.remoteUrl === "string"
|
||||
&& typeof v.createdAt === "string";
|
||||
}
|
||||
|
||||
/**
|
||||
* Write (or refresh) repo metadata into the external state directory.
|
||||
* Called on open so metadata tracks repo path moves while keeping createdAt stable.
|
||||
* Non-fatal: a metadata write failure must never block project setup.
|
||||
*/
|
||||
function writeRepoMeta(externalPath: string, remoteUrl: string, gitRoot: string): void {
|
||||
const metaPath = join(externalPath, "repo-meta.json");
|
||||
try {
|
||||
let createdAt = new Date().toISOString();
|
||||
let existing: RepoMeta | null = null;
|
||||
if (existsSync(metaPath)) {
|
||||
try {
|
||||
const parsed = JSON.parse(readFileSync(metaPath, "utf-8"));
|
||||
if (isRepoMeta(parsed)) {
|
||||
existing = parsed;
|
||||
createdAt = parsed.createdAt;
|
||||
// Fast path: nothing changed.
|
||||
if (
|
||||
parsed.version === 1
|
||||
&& parsed.hash === basename(externalPath)
|
||||
&& parsed.gitRoot === gitRoot
|
||||
&& parsed.remoteUrl === remoteUrl
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Fall through and rewrite invalid metadata.
|
||||
}
|
||||
}
|
||||
|
||||
const meta: RepoMeta = {
|
||||
version: 1,
|
||||
hash: basename(externalPath),
|
||||
gitRoot,
|
||||
remoteUrl,
|
||||
createdAt,
|
||||
};
|
||||
// Keep file format stable even when refreshing.
|
||||
writeFileSync(metaPath, JSON.stringify(meta, null, 2) + "\n", "utf-8");
|
||||
} catch {
|
||||
// Non-fatal — metadata write failure should not block project setup
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read repo metadata from the external state directory.
|
||||
* Returns null if the file doesn't exist or can't be parsed.
|
||||
*/
|
||||
export function readRepoMeta(externalPath: string): RepoMeta | null {
|
||||
const metaPath = join(externalPath, "repo-meta.json");
|
||||
try {
|
||||
if (!existsSync(metaPath)) return null;
|
||||
const raw = readFileSync(metaPath, "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
return isRepoMeta(parsed) ? parsed : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Repo Identity ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
|
|
@ -136,6 +217,15 @@ export function externalGsdRoot(basePath: string): string {
|
|||
return join(base, "projects", repoIdentity(basePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the root directory that stores project-scoped external state.
|
||||
* Honors GSD_STATE_DIR override before falling back to GSD_HOME.
|
||||
*/
|
||||
export function externalProjectsRoot(): string {
|
||||
const base = process.env.GSD_STATE_DIR || gsdHome;
|
||||
return join(base, "projects");
|
||||
}
|
||||
|
||||
// ─── Symlink Management ─────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
|
|
@ -156,6 +246,9 @@ export function ensureGsdSymlink(projectPath: string): string {
|
|||
// Ensure external directory exists
|
||||
mkdirSync(externalPath, { recursive: true });
|
||||
|
||||
// Write repo metadata once so cleanup commands can identify this directory later.
|
||||
writeRepoMeta(externalPath, getRemoteUrl(projectPath), resolveGitRoot(projectPath));
|
||||
|
||||
const replaceWithSymlink = (): string => {
|
||||
rmSync(localGsd, { recursive: true, force: true });
|
||||
symlinkSync(externalPath, localGsd, "junction");
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
import { mkdtempSync, rmSync, writeFileSync, existsSync, lstatSync, realpathSync, mkdirSync, symlinkSync } from "node:fs";
|
||||
import { mkdtempSync, rmSync, writeFileSync, existsSync, lstatSync, realpathSync, mkdirSync, symlinkSync, renameSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
|
||||
import { repoIdentity, externalGsdRoot, ensureGsdSymlink, validateProjectId } from "../repo-identity.ts";
|
||||
import { repoIdentity, externalGsdRoot, ensureGsdSymlink, validateProjectId, readRepoMeta } from "../repo-identity.ts";
|
||||
import { createTestContext } from "./test-helpers.ts";
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
|
|
@ -68,6 +68,44 @@ async function main(): Promise<void> {
|
|||
const hashIdentity = repoIdentity(base);
|
||||
assertTrue(/^[0-9a-f]{12}$/.test(hashIdentity), "repoIdentity returns 12-char hex hash when GSD_PROJECT_ID is unset");
|
||||
|
||||
console.log("\n=== readRepoMeta returns null for malformed metadata ===");
|
||||
{
|
||||
const malformedPath = join(stateDir, "projects", "malformed");
|
||||
mkdirSync(malformedPath, { recursive: true });
|
||||
writeFileSync(join(malformedPath, "repo-meta.json"), JSON.stringify({ version: 1 }) + "\n", "utf-8");
|
||||
assertEq(readRepoMeta(malformedPath), null, "malformed repo-meta.json is treated as unknown metadata");
|
||||
}
|
||||
|
||||
console.log("\n=== ensureGsdSymlink refreshes repo-meta gitRoot after repo move with fixed project id ===");
|
||||
{
|
||||
const moveRepo = realpathSync(mkdtempSync(join(tmpdir(), "gsd-repo-identity-move-")));
|
||||
run("git init -b main", moveRepo);
|
||||
run('git config user.name "Pi Test"', moveRepo);
|
||||
run('git config user.email "pi@example.com"', moveRepo);
|
||||
writeFileSync(join(moveRepo, "README.md"), "# Move Test Repo\n", "utf-8");
|
||||
run("git add README.md", moveRepo);
|
||||
run('git commit -m "chore: init move repo"', moveRepo);
|
||||
|
||||
process.env.GSD_PROJECT_ID = "fixed-project";
|
||||
const fixedExternal = ensureGsdSymlink(moveRepo);
|
||||
const before = readRepoMeta(fixedExternal);
|
||||
assertTrue(before !== null, "repo metadata exists before repo move");
|
||||
assertEq(before!.gitRoot, realpathSync(moveRepo), "repo metadata tracks current git root before move");
|
||||
|
||||
const movedBase = join(tmpdir(), `gsd-repo-identity-moved-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
renameSync(moveRepo, movedBase);
|
||||
const movedExternal = ensureGsdSymlink(movedBase);
|
||||
assertEq(realpathSync(movedExternal), realpathSync(fixedExternal), "fixed project id keeps the same external state dir");
|
||||
|
||||
const after = readRepoMeta(movedExternal);
|
||||
assertTrue(after !== null, "repo metadata exists after repo move");
|
||||
assertEq(after!.gitRoot, realpathSync(movedBase), "repo metadata gitRoot is refreshed to moved repo path");
|
||||
assertEq(after!.createdAt, before!.createdAt, "repo metadata preserves createdAt on refresh");
|
||||
|
||||
rmSync(movedBase, { recursive: true, force: true });
|
||||
delete process.env.GSD_PROJECT_ID;
|
||||
}
|
||||
|
||||
console.log("\n=== validateProjectId rejects invalid values ===");
|
||||
for (const invalid of ["has spaces", "path/traversal", "dot..dot", "back\\slash"]) {
|
||||
assertTrue(!validateProjectId(invalid), `validateProjectId rejects invalid value: "${invalid}"`);
|
||||
|
|
@ -78,6 +116,7 @@ async function main(): Promise<void> {
|
|||
assertTrue(validateProjectId(valid), `validateProjectId accepts valid value: "${valid}"`);
|
||||
}
|
||||
} finally {
|
||||
delete process.env.GSD_PROJECT_ID;
|
||||
delete process.env.GSD_STATE_DIR;
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
rmSync(stateDir, { recursive: true, force: true });
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
/**
|
||||
* worktree-sync-milestones.test.ts — Regression test for #1311.
|
||||
* worktree-sync-milestones.test.ts — Regression tests for #1311 and #1678.
|
||||
*
|
||||
* Verifies that syncProjectRootToWorktree copies milestone artifacts
|
||||
* from the main repo's .gsd/ into the worktree's .gsd/ for the
|
||||
* specified milestone, and deletes gsd.db so it rebuilds from fresh state.
|
||||
*
|
||||
* Also verifies that syncWorktreeStateBack recurses into tasks/ subdirectories
|
||||
* so task-level summaries are not dropped on milestone teardown (#1678).
|
||||
*
|
||||
* Covers:
|
||||
* - Milestone directory synced from main to worktree
|
||||
* - Missing slices within a milestone are synced
|
||||
|
|
@ -12,6 +15,7 @@
|
|||
* - No-op when paths are equal
|
||||
* - No-op when milestoneId is null
|
||||
* - Non-existent directories handled gracefully
|
||||
* - syncWorktreeStateBack recurses into tasks/ subdirectory (#1678)
|
||||
*/
|
||||
|
||||
import { mkdtempSync, mkdirSync, writeFileSync, rmSync, existsSync } from 'node:fs';
|
||||
|
|
@ -19,7 +23,7 @@ import { join } from 'node:path';
|
|||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { syncProjectRootToWorktree } from '../auto-worktree-sync.ts';
|
||||
import { syncGsdStateToWorktree } from '../auto-worktree.ts';
|
||||
import { syncGsdStateToWorktree, syncWorktreeStateBack } from '../auto-worktree.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
||||
const { assertTrue, report } = createTestContext();
|
||||
|
|
@ -180,6 +184,51 @@ async function main(): Promise<void> {
|
|||
}
|
||||
}
|
||||
|
||||
// ─── 8. syncWorktreeStateBack recurses into tasks/ (#1678) ───────────
|
||||
console.log('\n=== 8. syncWorktreeStateBack copies tasks/ subdirectory (#1678) ===');
|
||||
{
|
||||
const mainBase = mkdtempSync(join(tmpdir(), 'gsd-wt-back-main-'));
|
||||
const wtBase = mkdtempSync(join(tmpdir(), 'gsd-wt-back-wt-'));
|
||||
|
||||
try {
|
||||
// Build worktree milestone structure with slice-level and task-level files
|
||||
const wtSliceDir = join(wtBase, '.gsd', 'milestones', 'M001', 'slices', 'S01');
|
||||
const wtTasksDir = join(wtSliceDir, 'tasks');
|
||||
mkdirSync(wtTasksDir, { recursive: true });
|
||||
writeFileSync(join(wtSliceDir, 'S01-SUMMARY.md'), '# S01 Summary');
|
||||
writeFileSync(join(wtTasksDir, 'T01-SUMMARY.md'), '# T01 Summary');
|
||||
writeFileSync(join(wtTasksDir, 'T02-SUMMARY.md'), '# T02 Summary');
|
||||
|
||||
// Main project root starts with only the milestone directory (no slices yet)
|
||||
mkdirSync(join(mainBase, '.gsd', 'milestones', 'M001'), { recursive: true });
|
||||
|
||||
const { synced } = syncWorktreeStateBack(mainBase, wtBase, 'M001');
|
||||
|
||||
const mainSliceDir = join(mainBase, '.gsd', 'milestones', 'M001', 'slices', 'S01');
|
||||
const mainTasksDir = join(mainSliceDir, 'tasks');
|
||||
|
||||
assertTrue(
|
||||
existsSync(join(mainSliceDir, 'S01-SUMMARY.md')),
|
||||
'#1678: slice SUMMARY synced to project root',
|
||||
);
|
||||
assertTrue(
|
||||
existsSync(join(mainTasksDir, 'T01-SUMMARY.md')),
|
||||
'#1678: task T01-SUMMARY synced to project root',
|
||||
);
|
||||
assertTrue(
|
||||
existsSync(join(mainTasksDir, 'T02-SUMMARY.md')),
|
||||
'#1678: task T02-SUMMARY synced to project root',
|
||||
);
|
||||
assertTrue(
|
||||
synced.some((p) => p.includes('tasks/T01-SUMMARY.md')),
|
||||
'#1678: task summary appears in synced list',
|
||||
);
|
||||
} finally {
|
||||
rmSync(mainBase, { recursive: true, force: true });
|
||||
rmSync(wtBase, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
report();
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue