feat(gsd): add workflow-logger and wire into engine, tool, manifest, reconcile paths (#2494)

* feat(gsd): add workflow-logger for structured operational error/warning accumulation

Adds workflow-logger.ts — a centralized in-memory accumulator for operational
warnings and errors across the GSD engine pipeline.

Key additions vs the standalone/workflow-logger branch:
- Fix hasWarnings() to filter severity === "warn" (was returning _buffer.length > 0,
  incorrectly returning true for error-only buffers)
- Add hasAnyIssues() for callers that want to check for either severity
- Add drainAndSummarize() atomic helper to prevent the drain-before-summarize footgun
- Document singleton safety requirement: callers must call _resetLogs() per unit
- Document always-on stderr policy (intentional, unlike debug-logger opt-in)
- Move test from engine/ to tests/ to match project test discovery glob
- Expand test suite from 15 to 32 cases: stderr output, context handling,
  hasWarnings with errors-only buffer, drainAndSummarize, double-drain,
  warnings-only summarize, formatForNotification context exclusion,
  buffer limit robustness, ISO timestamp validation
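
For orientation, a minimal sketch of the consumption pattern the auto-loop is
expected to follow (the wrapper function here is hypothetical; the logger API
is as listed above):

    import { _resetLogs, drainAndSummarize } from "./workflow-logger.js";

    // Hypothetical per-unit wrapper, for illustration only.
    async function withWorkflowLogs(runUnit: () => Promise<void>): Promise<void> {
      _resetLogs(); // shared module-level buffer: reset per unit to prevent log bleed
      await runUnit(); // call sites inside log via logWarning()/logError()
      // Atomic helper: the summary is built before the buffer is cleared,
      // avoiding the drain-before-summarize footgun noted above.
      const { logs, summary } = drainAndSummarize();
      if (summary !== null) {
        process.stderr.write(`[gsd] unit finished with ${logs.length} issue(s): ${summary}\n`);
      }
    }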

* feat(gsd): wire workflow-logger into engine, tool, manifest, and reconcile paths

Routes 34 previously silent/raw-stderr error and warning sites through the
structured workflow-logger so the auto-loop can drain and surface root causes.

Changes by component:

  tool (12 sites) — bootstrap/db-tools.ts
    All 12 gsd_* tool handler catch blocks converted from process.stderr.write
    to logError("tool", ...) with { tool, error } context.

  engine (9 sites) — auto/phases.ts (7), auto/run-unit.ts (2)
    7 catches in phases.ts (6 silent, 1 raw-stderr) now log via logWarning("engine", ...):
    health gate, milestone merge, completed-units archive, STATE.md rebuild,
    baseline char count, prompt reorder failure, disk flush.
    2 silent catches in run-unit.ts: chdir and clearQueue failures.

  manifest (8 sites) — db-writer.ts
    nextDecisionId, saveDecisionToDb, updateRequirementInDb, saveArtifactToDb
    error paths replaced with logError("manifest", ...).
    Shrinkage guard replaced with logWarning("manifest", ...).

  reconcile (5 sites) — auto-worktree.ts (2), worktree-manager.ts (3)
    Post-create hook failure, teardown directory persistence, stale worktree
    removal, submodule stash, stash failure — all replaced with
    logWarning("reconcile", ...) with { worktree } context.

No control flow changed. TypeScript clean. 32/32 tests pass.
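
Every conversion follows the same shape; schematically (gsd_example_tool is a
placeholder, not one of the real tool names):

    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      // before: process.stderr.write(`gsd-db: gsd_example_tool failed: ${msg}\n`);
      // after: structured entry; the logger itself still mirrors it to stderr
      logError("tool", `gsd_example_tool failed: ${msg}`, { tool: "gsd_example_tool", error: String(err) });
    }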

* fix(gsd): use info.name instead of global name in auto-worktree logWarning call

auto-worktree.ts

@@ -42,6 +42,7 @@ import {
} from "./worktree.js";
import { MergeConflictError, readIntegrationBranch, RUNTIME_EXCLUSION_PATHS } from "./git-service.js";
import { debugLog } from "./debug-logger.js";
import { logWarning } from "./workflow-logger.js";
import { loadEffectiveGSDPreferences } from "./preferences.js";
import {
nativeGetCurrentBranch,
@@ -700,7 +701,7 @@ export function createAutoWorktree(
const hookError = runWorktreePostCreateHook(basePath, info.path);
if (hookError) {
// Non-fatal — log but don't prevent worktree usage
console.error(`[GSD] ${hookError}`);
logWarning("reconcile", hookError, { worktree: info.name });
}
const previousCwd = process.cwd();
@@ -793,10 +794,12 @@ export function teardownAutoWorktree(
// backslashes (#1436), leaving ~1 GB+ orphaned directories.
const wtDir = worktreePath(originalBasePath, milestoneId);
if (existsSync(wtDir)) {
console.error(
`[GSD] WARNING: Worktree directory still exists after teardown: ${wtDir}\n` +
` This is likely an orphaned directory consuming disk space.\n` +
` Remove it manually with: rm -rf "${wtDir.replaceAll("\\", "/")}"`,
logWarning(
"reconcile",
`Worktree directory still exists after teardown: ${wtDir}. ` +
`This is likely an orphaned directory consuming disk space. ` +
`Remove it manually with: rm -rf "${wtDir.replaceAll("\\", "/")}"`,
{ worktree: milestoneId },
);
// Attempt a direct filesystem removal as a fallback
try {

auto/phases.ts

@@ -30,6 +30,7 @@ import { PROJECT_FILES } from "../detection.js";
import { MergeConflictError } from "../git-service.js";
import { join } from "node:path";
import { existsSync, cpSync } from "node:fs";
import { logWarning, logError } from "../workflow-logger.js";
// ─── generateMilestoneReport ──────────────────────────────────────────────────
@@ -164,8 +165,8 @@ export async function runPreDispatch(
debugLog("autoLoop", { phase: "exit", reason: "health-gate-failed" });
return { action: "break", reason: "health-gate-failed" };
}
} catch {
// Non-fatal
} catch (e) {
logWarning("engine", "Pre-dispatch health gate threw unexpectedly", { error: String(e) });
}
// Sync project root artifacts into worktree
@@ -247,7 +248,8 @@ export async function runPreDispatch(
await deps.stopAuto(ctx, pi, `Merge conflict on milestone ${s.currentMilestoneId}`);
return { action: "break", reason: "merge-conflict" };
}
// Non-conflict errors — log and continue
// Non-conflict merge errors — log and continue
logWarning("engine", "Milestone merge failed with non-conflict error", { milestone: s.currentMilestoneId!, error: String(mergeErr) });
}
// PR creation (auto_pr) is handled inside mergeMilestoneToMain (#2302)
@@ -290,7 +292,9 @@ export async function runPreDispatch(
cpSync(completedKeysPath, archivePath);
}
atomicWriteSync(completedKeysPath, JSON.stringify([], null, 2));
} catch { /* non-fatal */ }
} catch (e) {
logWarning("engine", "Failed to archive completed-units on milestone transition", { error: String(e) });
}
// Rebuild STATE.md immediately so it reflects the new active milestone.
// This bypasses the 30-second throttle in the normal rebuild path —
@@ -298,8 +302,8 @@ export async function runPreDispatch(
// immediate write.
try {
await deps.rebuildState(s.basePath);
} catch {
// Non-fatal — STATE.md will be rebuilt on the next regular cycle
} catch (e) {
logWarning("engine", "STATE.md rebuild failed after milestone transition", { error: String(e) });
}
}
@@ -919,8 +923,8 @@ export async function runUnitPhase(
(decisionsContent?.length ?? 0) +
(requirementsContent?.length ?? 0) +
(projectContent?.length ?? 0);
} catch {
// Non-fatal
} catch (e) {
logWarning("engine", "Baseline char count measurement failed", { error: String(e) });
}
}
@@ -930,9 +934,7 @@ export async function runUnitPhase(
} catch (reorderErr) {
const msg =
reorderErr instanceof Error ? reorderErr.message : String(reorderErr);
process.stderr.write(
`[gsd] prompt reorder failed (non-fatal): ${msg}\n`,
);
logWarning("engine", "Prompt reorder failed", { error: msg });
}
// Select and apply model (with tier escalation on retry — normal units only)
@@ -1135,7 +1137,9 @@ export async function runUnitPhase(
const completedKeysPath = join(gsdRoot(s.basePath), "completed-units.json");
const keys = s.completedUnits.map((u) => `${u.type}/${u.id}`);
atomicWriteSync(completedKeysPath, JSON.stringify(keys, null, 2));
} catch { /* non-fatal: disk flush failure */ }
} catch (e) {
logWarning("engine", "Failed to flush completed-units to disk", { error: String(e) });
}
deps.clearUnitRuntimeRecord(s.basePath, unitType, unitId);
s.unitDispatchCount.delete(`${unitType}/${unitId}`);

auto/run-unit.ts

@@ -11,6 +11,7 @@ import { NEW_SESSION_TIMEOUT_MS } from "./session.js";
import type { UnitResult } from "./types.js";
import { _setCurrentResolve, _setSessionSwitchInFlight } from "./resolve.js";
import { debugLog } from "../debug-logger.js";
import { logWarning, logError } from "../workflow-logger.js";
/**
* Execute a single unit: create a new session, send the prompt, and await
@@ -85,7 +86,9 @@ export async function runUnit(
if (process.cwd() !== s.basePath) {
process.chdir(s.basePath);
}
} catch { /* non-fatal — chdir may fail if dir was removed */ }
} catch (e) {
logWarning("engine", "Failed to chdir to basePath before dispatch", { basePath: s.basePath, error: String(e) });
}
// ── Send the prompt ──
debugLog("runUnit", { phase: "send-message", unitType, unitId });
@@ -115,8 +118,8 @@ export async function runUnit(
if (typeof cmdCtxAny?.clearQueue === "function") {
(cmdCtxAny.clearQueue as () => unknown)();
}
} catch {
// Non-fatal — clearQueue may not be available in all contexts
} catch (e) {
logWarning("engine", "clearQueue failed after unit completion", { error: String(e) });
}
return result;

bootstrap/db-tools.ts

@@ -6,6 +6,7 @@ import { findMilestoneIds, nextMilestoneId, claimReservedId, getReservedMileston
import { loadEffectiveGSDPreferences } from "../preferences.js";
import { ensureDbOpen } from "./dynamic-tools.js";
import { StringEnum } from "@gsd/pi-ai";
import { logError } from "../workflow-logger.js";
/**
* Register an alias tool that shares the same execute function as its canonical counterpart.
@@ -52,7 +53,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: gsd_decision_save tool failed: ${msg}\n`);
logError("tool", `gsd_decision_save tool failed: ${msg}`, { tool: "gsd_decision_save", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error saving decision: ${msg}` }],
details: { operation: "save_decision", error: msg } as any,
@@ -143,7 +144,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: gsd_requirement_update tool failed: ${msg}\n`);
logError("tool", `gsd_requirement_update tool failed: ${msg}`, { tool: "gsd_requirement_update", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error updating requirement: ${msg}` }],
details: { operation: "update_requirement", id: params.id, error: msg } as any,
@@ -239,7 +240,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: gsd_summary_save tool failed: ${msg}\n`);
logError("tool", `gsd_summary_save tool failed: ${msg}`, { tool: "gsd_summary_save", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error saving artifact: ${msg}` }],
details: { operation: "save_summary", error: msg } as any,
@@ -402,7 +403,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: plan_milestone tool failed: ${msg}\n`);
logError("tool", `plan_milestone tool failed: ${msg}`, { tool: "gsd_plan_milestone", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error planning milestone: ${msg}` }],
details: { operation: "plan_milestone", error: msg } as any,
@@ -495,7 +496,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: plan_slice tool failed: ${msg}\n`);
logError("tool", `plan_slice tool failed: ${msg}`, { tool: "gsd_plan_slice", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error planning slice: ${msg}` }],
details: { operation: "plan_slice", error: msg } as any,
@@ -572,7 +573,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: plan_task tool failed: ${msg}\n`);
logError("tool", `plan_task tool failed: ${msg}`, { tool: "gsd_plan_task", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error planning task: ${msg}` }],
details: { operation: "plan_task", error: msg } as any,
@@ -642,7 +643,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: complete_task tool failed: ${msg}\n`);
logError("tool", `complete_task tool failed: ${msg}`, { tool: "gsd_task_complete", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error completing task: ${msg}` }],
details: { operation: "complete_task", error: msg } as any,
@@ -723,7 +724,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: complete_slice tool failed: ${msg}\n`);
logError("tool", `complete_slice tool failed: ${msg}`, { tool: "gsd_slice_complete", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error completing slice: ${msg}` }],
details: { operation: "complete_slice", error: msg } as any,
@@ -834,7 +835,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: complete_milestone tool failed: ${msg}\n`);
logError("tool", `complete_milestone tool failed: ${msg}`, { tool: "gsd_complete_milestone", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error completing milestone: ${msg}` }],
details: { operation: "complete_milestone", error: msg } as any,
@@ -904,7 +905,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: validate_milestone tool failed: ${msg}\n`);
logError("tool", `validate_milestone tool failed: ${msg}`, { tool: "gsd_validate_milestone", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error validating milestone: ${msg}` }],
details: { operation: "validate_milestone", error: msg } as any,
@@ -973,7 +974,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: replan_slice tool failed: ${msg}\n`);
logError("tool", `replan_slice tool failed: ${msg}`, { tool: "gsd_replan_slice", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error replanning slice: ${msg}` }],
details: { operation: "replan_slice", error: msg } as any,
@@ -1053,7 +1054,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
};
} catch (err) {
const msg = err instanceof Error ? err.message : String(err);
process.stderr.write(`gsd-db: reassess_roadmap tool failed: ${msg}\n`);
logError("tool", `reassess_roadmap tool failed: ${msg}`, { tool: "gsd_reassess_roadmap", error: String(err) });
return {
content: [{ type: "text" as const, text: `Error reassessing roadmap: ${msg}` }],
details: { operation: "reassess_roadmap", error: msg } as any,

db-writer.ts

@@ -14,6 +14,7 @@ import type { Decision, Requirement } from './types.js';
import { resolveGsdRootFile } from './paths.js';
import { saveFile } from './files.js';
import { GSDError, GSD_STALE_STATE, GSD_IO_ERROR } from './errors.js';
import { logWarning, logError } from './workflow-logger.js';
import { invalidateStateCache } from './state.js';
import { clearPathCache } from './paths.js';
import { clearParseCache } from './files.js';
@@ -221,7 +222,7 @@ export async function nextDecisionId(): Promise<string> {
const next = maxNum + 1;
return `D${String(next).padStart(3, '0')}`;
} catch (err) {
process.stderr.write(`gsd-db: nextDecisionId failed: ${(err as Error).message}\n`);
logError('manifest', 'nextDecisionId failed', { fn: 'nextDecisionId', error: String((err as Error).message) });
return 'D001';
}
}
@@ -311,9 +312,7 @@ export async function saveDecisionToDb(
try {
await saveFile(filePath, md);
} catch (diskErr) {
process.stderr.write(
`gsd-db: saveDecisionToDb — disk write failed, rolling back DB row: ${(diskErr as Error).message}\n`,
);
logError('manifest', 'disk write failed, rolling back DB row', { fn: 'saveDecisionToDb', error: String((diskErr as Error).message) });
adapter?.prepare('DELETE FROM decisions WHERE id = :id').run({ ':id': id });
throw diskErr;
}
@@ -325,7 +324,7 @@ export async function saveDecisionToDb(
return { id };
} catch (err) {
process.stderr.write(`gsd-db: saveDecisionToDb failed: ${(err as Error).message}\n`);
logError('manifest', 'saveDecisionToDb failed', { fn: 'saveDecisionToDb', error: String((err as Error).message) });
throw err;
}
}
@@ -388,9 +387,7 @@ export async function updateRequirementInDb(
try {
await saveFile(filePath, md);
} catch (diskErr) {
process.stderr.write(
`gsd-db: updateRequirementInDb — disk write failed, reverting DB row: ${(diskErr as Error).message}\n`,
);
logError('manifest', 'disk write failed, reverting DB row', { fn: 'updateRequirementInDb', error: String((diskErr as Error).message) });
db.upsertRequirement(existing);
throw diskErr;
}
@@ -400,7 +397,7 @@ export async function updateRequirementInDb(
clearPathCache();
clearParseCache();
} catch (err) {
process.stderr.write(`gsd-db: updateRequirementInDb failed: ${(err as Error).message}\n`);
logError('manifest', 'updateRequirementInDb failed', { fn: 'updateRequirementInDb', error: String((err as Error).message) });
throw err;
}
}
@@ -444,10 +441,7 @@ export async function saveArtifactToDb(
const existingSize = statSync(fullPath).size;
const newSize = Buffer.byteLength(opts.content, 'utf-8');
if (existingSize > 0 && newSize < existingSize * 0.5) {
process.stderr.write(
`gsd-db: saveArtifactToDb — new content (${newSize}B) is <50% of existing file ` +
`(${existingSize}B) at ${opts.path}. Preserving disk file to prevent data loss.\n`,
);
logWarning('manifest', `new content (${newSize}B) is <50% of existing file (${existingSize}B), preserving disk file`, { fn: 'saveArtifactToDb', path: opts.path });
dbContent = readFileSync(fullPath, 'utf-8');
skipDiskWrite = true;
}
@@ -467,9 +461,7 @@ export async function saveArtifactToDb(
try {
await saveFile(fullPath, opts.content);
} catch (diskErr) {
process.stderr.write(
`gsd-db: saveArtifactToDb — disk write failed, rolling back DB row: ${(diskErr as Error).message}\n`,
);
logError('manifest', 'disk write failed, rolling back DB row', { fn: 'saveArtifactToDb', error: String((diskErr as Error).message) });
const rollbackAdapter = db._getAdapter();
rollbackAdapter?.prepare('DELETE FROM artifacts WHERE path = :path').run({ ':path': opts.path });
throw diskErr;
@@ -481,7 +473,7 @@ export async function saveArtifactToDb(
clearPathCache();
clearParseCache();
} catch (err) {
process.stderr.write(`gsd-db: saveArtifactToDb failed: ${(err as Error).message}\n`);
logError('manifest', 'saveArtifactToDb failed', { fn: 'saveArtifactToDb', error: String((err as Error).message) });
throw err;
}
}
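
To make the shrinkage guard concrete (byte counts hypothetical):

    // existingSize = 1000 B, newSize = 400 B -> 400 < 1000 * 0.5, so warn,
    // keep the disk file, and store the existing disk content in the DB instead.
    // existingSize = 1000 B, newSize = 600 B -> 600 >= 500, so write through normally.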

workflow-logger tests (new file, tests/)

@@ -0,0 +1,275 @@
// GSD Extension — Workflow Logger Tests
// Tests for the centralized warning/error accumulator.
import { describe, test, beforeEach } from "node:test";
import assert from "node:assert/strict";
import {
logWarning,
logError,
drainLogs,
drainAndSummarize,
peekLogs,
hasErrors,
hasWarnings,
hasAnyIssues,
summarizeLogs,
formatForNotification,
_resetLogs,
} from "../workflow-logger.ts";
const ISO_RE = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
describe("workflow-logger", () => {
beforeEach(() => {
_resetLogs();
});
describe("accumulation", () => {
test("logWarning adds an entry with severity warn", () => {
logWarning("engine", "test warning");
const entries = peekLogs();
assert.equal(entries.length, 1);
assert.equal(entries[0].severity, "warn");
assert.equal(entries[0].component, "engine");
assert.equal(entries[0].message, "test warning");
assert.match(entries[0].ts, ISO_RE);
});
test("logError adds an entry with severity error", () => {
logError("intercept", "blocked write", { path: "/foo/STATE.md" });
const entries = peekLogs();
assert.equal(entries.length, 1);
assert.equal(entries[0].severity, "error");
assert.equal(entries[0].component, "intercept");
assert.deepEqual(entries[0].context, { path: "/foo/STATE.md" });
});
test("accumulates multiple entries in order", () => {
logWarning("projection", "render failed");
logError("intercept", "blocked write");
logWarning("manifest", "write failed");
assert.equal(peekLogs().length, 3);
assert.equal(peekLogs()[0].component, "projection");
assert.equal(peekLogs()[1].component, "intercept");
assert.equal(peekLogs()[2].component, "manifest");
});
test("omits context field when not provided", () => {
logWarning("engine", "no context");
assert.equal("context" in peekLogs()[0], false);
});
test("omits context field when undefined is passed", () => {
logWarning("engine", "no context", undefined);
assert.equal("context" in peekLogs()[0], false);
});
test("context with special characters is stored as-is", () => {
logError("tool", "failed", { path: '/foo/"quoted".md', msg: "line1\nline2" });
assert.deepEqual(peekLogs()[0].context, {
path: '/foo/"quoted".md',
msg: "line1\nline2",
});
});
test("ts field is a valid ISO 8601 timestamp", () => {
logWarning("engine", "ts check");
assert.match(peekLogs()[0].ts, ISO_RE);
});
});
describe("drain", () => {
test("returns all entries and clears buffer", () => {
logWarning("engine", "w1");
logError("engine", "e1");
const drained = drainLogs();
assert.equal(drained.length, 2);
assert.equal(peekLogs().length, 0);
});
test("returns empty array when no entries", () => {
assert.deepEqual(drainLogs(), []);
});
test("second drain returns empty array", () => {
logWarning("engine", "w1");
drainLogs();
assert.deepEqual(drainLogs(), []);
});
});
describe("drainAndSummarize", () => {
test("returns summary and clears buffer atomically", () => {
logError("intercept", "blocked");
logWarning("projection", "render failed");
const { logs, summary } = drainAndSummarize();
assert.equal(logs.length, 2);
assert.equal(peekLogs().length, 0);
assert.ok(summary?.includes("1 error(s)"));
assert.ok(summary?.includes("1 warning(s)"));
});
test("returns null summary when buffer is empty", () => {
const { logs, summary } = drainAndSummarize();
assert.deepEqual(logs, []);
assert.equal(summary, null);
});
});
describe("hasErrors / hasWarnings / hasAnyIssues", () => {
test("hasErrors returns false when only warnings", () => {
logWarning("engine", "just a warning");
assert.equal(hasErrors(), false);
assert.equal(hasWarnings(), true);
});
test("hasErrors returns true when errors present", () => {
logWarning("engine", "warning");
logError("intercept", "error");
assert.equal(hasErrors(), true);
});
test("hasWarnings returns false when buffer empty", () => {
assert.equal(hasWarnings(), false);
});
test("hasWarnings returns false when buffer contains only errors", () => {
logError("intercept", "only an error");
assert.equal(hasWarnings(), false);
assert.equal(hasErrors(), true);
});
test("hasAnyIssues returns true for warnings only", () => {
logWarning("engine", "warn");
assert.equal(hasAnyIssues(), true);
});
test("hasAnyIssues returns true for errors only", () => {
logError("engine", "err");
assert.equal(hasAnyIssues(), true);
});
test("hasAnyIssues returns false when buffer empty", () => {
assert.equal(hasAnyIssues(), false);
});
});
describe("summarizeLogs", () => {
test("returns null when empty", () => {
assert.equal(summarizeLogs(), null);
});
test("summarizes errors and warnings separately", () => {
logError("intercept", "blocked STATE.md");
logWarning("projection", "render failed");
logWarning("manifest", "write failed");
const summary = summarizeLogs()!;
assert.ok(summary.includes("1 error(s)"));
assert.ok(summary.includes("blocked STATE.md"));
assert.ok(summary.includes("2 warning(s)"));
});
test("only shows errors section when no warnings", () => {
logError("intercept", "blocked");
const summary = summarizeLogs()!;
assert.ok(summary.includes("1 error(s)"));
assert.ok(!summary.includes("warning"));
});
test("only shows warnings section when no errors", () => {
logWarning("projection", "render degraded");
logWarning("manifest", "write slow");
const summary = summarizeLogs()!;
assert.ok(summary.includes("2 warning(s)"));
assert.ok(!summary.includes("error"));
});
test("does not clear buffer", () => {
logError("intercept", "blocked");
summarizeLogs();
assert.equal(peekLogs().length, 1);
});
});
describe("formatForNotification", () => {
test("returns empty string for empty array", () => {
assert.equal(formatForNotification([]), "");
});
test("formats single entry without line breaks", () => {
logError("intercept", "blocked write");
const entries = drainLogs();
const formatted = formatForNotification(entries);
assert.equal(formatted, "[intercept] blocked write");
});
test("formats multiple entries with line breaks", () => {
logWarning("projection", "render failed");
logError("intercept", "blocked write");
const entries = drainLogs();
const formatted = formatForNotification(entries);
assert.ok(formatted.includes("[projection] render failed"));
assert.ok(formatted.includes("[intercept] blocked write"));
assert.ok(formatted.includes("\n"));
});
test("does not include context in formatted output", () => {
logError("tool", "failed", { cmd: "complete_task" });
const entries = drainLogs();
const formatted = formatForNotification(entries);
assert.equal(formatted, "[tool] failed");
assert.ok(!formatted.includes("complete_task"));
});
});
describe("buffer limit", () => {
test("caps at MAX_BUFFER entries, dropping oldest", () => {
const OVER = 110;
const MAX = 100;
for (let i = 0; i < OVER; i++) {
logWarning("engine", `msg-${i}`);
}
const entries = peekLogs();
assert.equal(entries.length, MAX);
// First (OVER - MAX) entries dropped; oldest surviving = msg-(OVER-MAX)
assert.equal(entries[0].message, `msg-${OVER - MAX}`);
assert.equal(entries[MAX - 1].message, `msg-${OVER - 1}`);
});
});
describe("stderr output", () => {
test("writes WARN prefix to stderr for warnings", (t) => {
const written: string[] = [];
const orig = process.stderr.write.bind(process.stderr);
// @ts-ignore — patching for test
process.stderr.write = (chunk: string) => { written.push(chunk); return true; };
t.after(() => { process.stderr.write = orig; });
logWarning("engine", "test warn");
assert.equal(written.length, 1);
assert.ok(written[0].includes("[gsd:engine] WARN: test warn"));
});
test("writes ERROR prefix to stderr for errors", (t) => {
const written: string[] = [];
const orig = process.stderr.write.bind(process.stderr);
// @ts-ignore — patching for test
process.stderr.write = (chunk: string) => { written.push(chunk); return true; };
t.after(() => { process.stderr.write = orig; });
logError("intercept", "blocked");
assert.ok(written[0].includes("[gsd:intercept] ERROR: blocked"));
});
test("includes serialized context in stderr output", (t) => {
const written: string[] = [];
const orig = process.stderr.write.bind(process.stderr);
// @ts-ignore — patching for test
process.stderr.write = (chunk: string) => { written.push(chunk); return true; };
t.after(() => { process.stderr.write = orig; });
logError("tool", "failed", { cmd: "complete_task" });
assert.ok(written[0].includes('"cmd":"complete_task"'));
});
});
});
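
The three stderr tests repeat the same patch-and-restore steps; a shared helper
(hypothetical, not in the diff) would read:

    import type { TestContext } from "node:test";

    // Hypothetical test helper: capture stderr writes for one test's duration.
    function captureStderr(t: TestContext): string[] {
      const written: string[] = [];
      const orig = process.stderr.write.bind(process.stderr);
      // @ts-ignore - patching for test, same approach as the suite above
      process.stderr.write = (chunk: string) => { written.push(chunk); return true; };
      t.after(() => { process.stderr.write = orig; });
      return written;
    }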

workflow-logger.ts (new file)

@@ -0,0 +1,193 @@
// GSD Extension — Workflow Logger
// Centralized warning/error accumulator for the workflow engine pipeline.
// Captures structured entries that the auto-loop can drain after each unit
// to surface root causes for stuck loops, silent degradation, and blocked writes.
//
// Stderr policy: every logWarning/logError call writes immediately to stderr
// for terminal visibility. This is intentional — unlike debug-logger (which is
// opt-in and zero-overhead when disabled), workflow-logger covers operational
// warnings/errors that should always be visible. There is no disable flag.
//
// Singleton safety: _buffer is module-level and shared across all calls within
// a process. The auto-loop must call _resetLogs() (or drainAndSummarize()) at
// the start of each unit to prevent log bleed between units running in the same
// Node process.
// ─── Types ──────────────────────────────────────────────────────────────
export type LogSeverity = "warn" | "error";
export type LogComponent =
| "engine" // WorkflowEngine afterCommand side effects
| "projection" // Projection rendering
| "manifest" // Manifest write
| "event-log" // Event append
| "intercept" // Write intercept / tool-call blocks
| "migration" // Auto-migration from markdown
| "state" // deriveState fallback/degradation
| "tool" // Tool handler errors
| "compaction" // Event compaction
| "reconcile"; // Worktree reconciliation
export interface LogEntry {
ts: string;
severity: LogSeverity;
component: LogComponent;
message: string;
/** Optional structured context (file path, command name, etc.) */
context?: Record<string, string>;
}
// ─── Buffer ─────────────────────────────────────────────────────────────
const MAX_BUFFER = 100;
let _buffer: LogEntry[] = [];
// ─── Public API ─────────────────────────────────────────────────────────
/**
* Record a warning. Also writes to stderr for terminal visibility.
*/
export function logWarning(
component: LogComponent,
message: string,
context?: Record<string, string>,
): void {
_push("warn", component, message, context);
}
/**
* Record an error. Also writes to stderr for terminal visibility.
*/
export function logError(
component: LogComponent,
message: string,
context?: Record<string, string>,
): void {
_push("error", component, message, context);
}
/**
* Drain all accumulated entries and clear the buffer.
* Returns entries oldest-first.
*
* WARNING: Call summarizeLogs() or drainAndSummarize() BEFORE calling this
* if you need a summary; drainLogs() clears the buffer immediately.
*/
export function drainLogs(): LogEntry[] {
const entries = _buffer;
_buffer = [];
return entries;
}
/**
* Atomically summarize then drain: the safe way to consume logs.
* Use this in the auto-loop instead of calling summarizeLogs() + drainLogs()
* separately to avoid the ordering footgun.
*/
export function drainAndSummarize(): { logs: LogEntry[]; summary: string | null } {
const summary = summarizeLogs();
const logs = drainLogs();
return { logs, summary };
}
/**
* Peek at current entries without clearing.
*/
export function peekLogs(): readonly LogEntry[] {
return _buffer;
}
/**
* Returns true if the buffer contains any error-severity entries.
*/
export function hasErrors(): boolean {
return _buffer.some((e) => e.severity === "error");
}
/**
* Returns true if the buffer contains any warn-severity entries.
* Use hasAnyIssues() if you want to check for either severity.
*/
export function hasWarnings(): boolean {
return _buffer.some((e) => e.severity === "warn");
}
/**
* Returns true if the buffer contains any entries (warn or error).
*/
export function hasAnyIssues(): boolean {
return _buffer.length > 0;
}
/**
* Get a one-line summary of accumulated issues for stuck detection messages.
* Returns null if no entries.
*
* Must be called BEFORE drainLogs(); use drainAndSummarize() for safe ordering.
*/
export function summarizeLogs(): string | null {
if (_buffer.length === 0) return null;
const errors = _buffer.filter((e) => e.severity === "error");
const warns = _buffer.filter((e) => e.severity === "warn");
const parts: string[] = [];
if (errors.length > 0) {
parts.push(`${errors.length} error(s): ${errors.map((e) => e.message).join("; ")}`);
}
if (warns.length > 0) {
parts.push(`${warns.length} warning(s): ${warns.map((e) => e.message).join("; ")}`);
}
return parts.join(" | ");
}
/**
* Format entries for display (used by auto-loop post-unit notification).
* Note: context fields are not included in the formatted output.
*/
export function formatForNotification(entries: readonly LogEntry[]): string {
if (entries.length === 0) return "";
if (entries.length === 1) {
const e = entries[0];
return `[${e.component}] ${e.message}`;
}
return entries
.map((e) => `[${e.component}] ${e.message}`)
.join("\n");
}
/**
* Reset buffer. Call at the start of each auto-loop unit to prevent log bleed
* between units running in the same process. Also called directly by tests.
*/
export function _resetLogs(): void {
_buffer = [];
}
// ─── Internal ───────────────────────────────────────────────────────────
function _push(
severity: LogSeverity,
component: LogComponent,
message: string,
context?: Record<string, string>,
): void {
const entry: LogEntry = {
ts: new Date().toISOString(),
severity,
component,
message,
...(context ? { context } : {}),
};
// Always forward to stderr so terminal watchers see it (see module header for policy)
const prefix = severity === "error" ? "ERROR" : "WARN";
const ctxStr = context ? ` ${JSON.stringify(context)}` : "";
process.stderr.write(`[gsd:${component}] ${prefix}: ${message}${ctxStr}\n`);
// Buffer for auto-loop to drain
_buffer.push(entry);
if (_buffer.length > MAX_BUFFER) {
_buffer.shift();
}
}
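
As a concrete reading of summarizeLogs(), one error plus two warnings in the
buffer (messages taken from the test suite above) produce:

    // logError("intercept", "blocked STATE.md");
    // logWarning("projection", "render failed");
    // logWarning("manifest", "write failed");
    // summarizeLogs()
    //   => '1 error(s): blocked STATE.md | 2 warning(s): render failed; write failed'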

worktree-manager.ts

@@ -19,6 +19,7 @@ import { existsSync, mkdirSync, readFileSync, realpathSync, rmSync } from "node:
import { execFileSync } from "node:child_process";
import { join, resolve, sep } from "node:path";
import { GSDError, GSD_PARSE_ERROR, GSD_STALE_STATE, GSD_LOCK_HELD, GSD_GIT_ERROR, GSD_MERGE_CONFLICT } from "./errors.js";
import { logWarning } from "./workflow-logger.js";
import {
nativeBranchDelete,
nativeBranchExists,
@@ -136,9 +137,7 @@ export function createWorktree(basePath: string, name: string, opts: { branch?:
// worktree can be created in its place.
const gitFilePath = join(wtPath, ".git");
if (!existsSync(gitFilePath)) {
console.error(
`[GSD] Removing stale worktree directory (no .git file): ${wtPath}`,
);
logWarning("reconcile", `Removing stale worktree directory (no .git file): ${wtPath}`, { worktree: name });
rmSync(wtPath, { recursive: true, force: true });
} else {
throw new GSDError(GSD_STALE_STATE, `Worktree "${name}" already exists at ${wtPath}`);
@@ -345,14 +344,10 @@ export function removeWorktree(
"git", ["stash", "push", "-m", "gsd: auto-stash submodule changes before worktree teardown"],
{ cwd: resolvedWtPath, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" },
);
process.stderr.write(
`[GSD] WARNING: Stashed uncommitted submodule changes in ${resolvedWtPath} before worktree teardown.\n`,
);
logWarning("reconcile", `Stashed uncommitted submodule changes before worktree teardown`, { worktree: name, path: resolvedWtPath });
} catch {
// Stash failed — warn the user that submodule changes may be lost
process.stderr.write(
`[GSD] WARNING: Submodule changes detected in ${resolvedWtPath} — stash failed, changes may be lost during force removal.\n`,
);
logWarning("reconcile", `Submodule changes detected — stash failed, changes may be lost during force removal`, { worktree: name, path: resolvedWtPath });
}
}
} catch {