diff --git a/src/resources/extensions/gsd/auto-dashboard.ts b/src/resources/extensions/gsd/auto-dashboard.ts index b4ddbc78a..688b6bdbc 100644 --- a/src/resources/extensions/gsd/auto-dashboard.ts +++ b/src/resources/extensions/gsd/auto-dashboard.ts @@ -31,6 +31,7 @@ import { getRtkSessionSavings, type RtkSessionSavings, } from "../shared/rtk-session-stats.js"; +import { logWarning } from "./workflow-logger.js"; // ─── UAT Slice Extraction ───────────────────────────────────────────────────── @@ -287,7 +288,7 @@ export function updateSliceProgressCache(base: string, mid: string, activeSid?: } } catch (err) { // Non-fatal — just omit task count - process.stderr.write(`gsd [auto-dashboard]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `operation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -300,7 +301,7 @@ export function updateSliceProgressCache(base: string, mid: string, activeSid?: }; } catch (err) { // Non-fatal — widget just won't show progress bar - process.stderr.write(`gsd [auto-dashboard]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `operation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -336,7 +337,7 @@ function refreshLastCommit(basePath: string): void { lastCommitFetchedAt = Date.now(); } catch (err) { // Non-fatal — just skip last commit display - process.stderr.write(`gsd [auto-dashboard]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `operation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -380,7 +381,7 @@ function ensureWidgetModeLoaded(): void { widgetMode = saved as WidgetMode; } } catch (err) { /* non-fatal — use default */ - process.stderr.write(`gsd [auto-dashboard]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `operation failed: ${err instanceof Error ? 
err.message : String(err)}`); } } @@ -401,7 +402,7 @@ function persistWidgetMode(mode: WidgetMode): void { } writeFileSync(prefsPath, content, "utf-8"); } catch (err) { /* non-fatal — mode still set in memory */ - process.stderr.write(`gsd [auto-dashboard]: file write failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `file write failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -466,7 +467,7 @@ export function updateProgressWidget( // Cache git branch at widget creation time (not per render) let cachedBranch: string | null = null; try { cachedBranch = getCurrentBranch(accessors.getBasePath()); } catch (err) { /* not in git repo */ - process.stderr.write(`gsd [auto-dashboard]: git branch detection failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `git branch detection failed: ${err instanceof Error ? err.message : String(err)}`); } // Cache short pwd (last 2 path segments only) + worktree/branch info @@ -504,7 +505,8 @@ export function updateProgressWidget( const sessionId = ctx.sessionManager.getSessionId(); const savings = sessionId ? getRtkSessionSavings(accessors.getBasePath(), sessionId) : null; cachedRtkLabel = formatRtkSavingsLabel(savings); - } catch { + } catch (err) { + logWarning("dashboard", `RTK savings lookup failed: ${err instanceof Error ? (err as Error).message : String(err)}`); cachedRtkLabel = null; } }; @@ -529,7 +531,7 @@ export function updateProgressWidget( refreshRtkLabel(); cachedLines = undefined; } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-dashboard]: DB status update failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dashboard", `DB status update failed: ${err instanceof Error ? 
err.message : String(err)}`); } }, 15_000); diff --git a/src/resources/extensions/gsd/auto-dispatch.ts b/src/resources/extensions/gsd/auto-dispatch.ts index 925a50bdb..ccc3921e6 100644 --- a/src/resources/extensions/gsd/auto-dispatch.ts +++ b/src/resources/extensions/gsd/auto-dispatch.ts @@ -28,7 +28,7 @@ import { buildSliceFileName, } from "./paths.js"; import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; -import { logError } from "./workflow-logger.js"; +import { logWarning, logError } from "./workflow-logger.js"; import { join } from "node:path"; import { hasImplementationArtifacts } from "./auto-recovery.js"; import { @@ -713,7 +713,7 @@ export const DISPATCH_RULES: DispatchRule[] = [ } } } catch (err) { /* fall through — don't block on DB errors */ - process.stderr.write(`gsd [auto-dispatch]: lock cleanup failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dispatch", `verification class check failed: ${err instanceof Error ? err.message : String(err)}`); } return { @@ -758,7 +758,7 @@ export async function resolveDispatch( return await registry.evaluateDispatch(ctx); } catch (err) { // Registry not initialized — fall back to inline loop - process.stderr.write(`gsd [auto-dispatch]: dispatch failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("dispatch", `registry dispatch failed, falling back to inline rules: ${err instanceof Error ? err.message : String(err)}`); } for (const rule of DISPATCH_RULES) { diff --git a/src/resources/extensions/gsd/auto-post-unit.ts b/src/resources/extensions/gsd/auto-post-unit.ts index 17cb19e37..41a9ea323 100644 --- a/src/resources/extensions/gsd/auto-post-unit.ts +++ b/src/resources/extensions/gsd/auto-post-unit.ts @@ -281,7 +281,7 @@ export async function postUnitPreVerification(pctx: PostUnitContext, opts?: PreV ghIssueNumber = getTaskIssueNumberForCommit(s.basePath, mid, sid, tid) ?? 
undefined; } catch (err) { // GitHub sync not available — skip - process.stderr.write(`gsd [auto-post-unit]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `GitHub issue lookup failed: ${err instanceof Error ? err.message : String(err)}`); } taskContext = { @@ -559,9 +559,7 @@ export async function postUnitPostVerification(pctx: PostUnitContext): Promise<" } catch (dbErr) { // DB unavailable — fail explicitly rather than silently reverting to markdown mutation. // Use 'gsd recover' to rebuild DB state from disk if needed. - process.stderr.write( - `gsd: retry state-reset failed (DB unavailable): ${(dbErr as Error).message}. Run 'gsd recover' to reconcile.\n`, - ); + logError("engine", `retry state-reset failed (DB unavailable): ${(dbErr as Error).message}. Run 'gsd recover' to reconcile.`); } } diff --git a/src/resources/extensions/gsd/auto-prompts.ts b/src/resources/extensions/gsd/auto-prompts.ts index cf3fd5682..97ce5cf3f 100644 --- a/src/resources/extensions/gsd/auto-prompts.ts +++ b/src/resources/extensions/gsd/auto-prompts.ts @@ -27,6 +27,7 @@ import { computeBudgets, resolveExecutorContextWindow, truncateAtSectionBoundary import { getPendingGates } from "./gsd-db.js"; import { formatDecisionsCompact, formatRequirementsCompact } from "./structured-data-formatter.js"; import { readPhaseAnchor, formatAnchorForPrompt } from "./phase-anchor.js"; +import { logWarning } from "./workflow-logger.js"; // ─── Preamble Cap ───────────────────────────────────────────────────────────── @@ -49,7 +50,8 @@ function formatExecutorConstraints(): string { try { const prefs = loadEffectiveGSDPreferences(); windowTokens = resolveExecutorContextWindow(undefined, prefs?.preferences); - } catch { + } catch (e) { + logWarning("prompt", `resolveExecutorContextWindow failed: ${(e as Error).message}`); windowTokens = 200_000; // safe default } const budgets = computeBudgets(windowTokens); @@ -198,8 +200,8 @@ export async function 
inlineDependencySummaries( } // If slice not found in DB, fall through to file-based parsing } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `inlineDependencySummaries DB lookup failed: ${err instanceof Error ? err.message : String(err)}`); } // If DB didn't provide depends, fall back to roadmap parsing @@ -279,8 +281,7 @@ export async function inlineDecisionsFromDb( } } } catch (err) { - // DB not available — fall through to filesystem - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `inlineDecisionsFromDb failed: ${err instanceof Error ? err.message : String(err)}`); } return inlineGsdRootFile(base, "decisions.md", "Decisions"); } @@ -307,8 +308,7 @@ export async function inlineRequirementsFromDb( } } } catch (err) { - // DB not available — fall through to filesystem - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `inlineRequirementsFromDb failed: ${err instanceof Error ? err.message : String(err)}`); } return inlineGsdRootFile(base, "requirements.md", "Requirements"); } @@ -330,8 +330,7 @@ export async function inlineProjectFromDb( } } } catch (err) { - // DB not available — fall through to filesystem - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `inlineProjectFromDb failed: ${err instanceof Error ? 
err.message : String(err)}`); } return inlineGsdRootFile(base, "project.md", "Project"); } @@ -492,8 +491,7 @@ export function buildSkillActivationBlock(params: { matched.add(normalizeSkillReference(skillName)); } } catch (err) { - // Non-fatal — malformed task plan should not break prompt construction - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `parseTaskPlanFile failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -742,8 +740,8 @@ export async function checkNeedsReassessment( return { sliceId: lastCompleted }; } } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `checkNeedsReassessment DB lookup failed: ${err instanceof Error ? err.message : String(err)}`); } // File-based fallback using roadmap checkboxes @@ -810,8 +808,8 @@ export async function checkNeedsRunUat( return { sliceId: sid, uatType }; } } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `checkNeedsRunUat DB lookup failed: ${err instanceof Error ? err.message : String(err)}`); } // File-based fallback using roadmap checkboxes @@ -1322,8 +1320,8 @@ export async function buildCompleteMilestonePrompt( if (isDbAvailable()) { sliceIds = getMilestoneSlices(mid).map(s => s.id); } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `buildCompleteMilestonePrompt DB lookup failed: ${err instanceof Error ? 
err.message : String(err)}`); } // File-based fallback: parse roadmap for slice IDs when DB has no data if (sliceIds.length === 0 && roadmapPath) { @@ -1405,8 +1403,8 @@ export async function buildValidateMilestonePrompt( } } } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: git push failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `buildValidateMilestonePrompt verification classes lookup failed: ${err instanceof Error ? err.message : String(err)}`); } // Inline all slice summaries and UAT results @@ -1416,8 +1414,8 @@ export async function buildValidateMilestonePrompt( if (isDbAvailable()) { valSliceIds = getMilestoneSlices(mid).map(s => s.id); } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `buildValidateMilestonePrompt slice IDs lookup failed: ${err instanceof Error ? err.message : String(err)}`); } // File-based fallback: parse roadmap for slice IDs when DB has no data if (valSliceIds.length === 0 && roadmapPath) { @@ -1558,8 +1556,7 @@ export async function buildReplanSlicePrompt( ).join("\n"); } } catch (err) { - // Non-fatal — captures module may not be available - process.stderr.write(`gsd [auto-prompts]: capture count failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `loadReplanCaptures failed: ${err instanceof Error ? err.message : String(err)}`); } return loadPrompt("replan-slice", { @@ -1660,8 +1657,7 @@ export async function buildReassessRoadmapPrompt( ).join("\n"); } } catch (err) { - // Non-fatal — captures module may not be available - process.stderr.write(`gsd [auto-prompts]: capture count failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("prompt", `loadDeferredCaptures failed: ${err instanceof Error ? 
err.message : String(err)}`); } const reassessCommitInstruction = "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; @@ -1877,8 +1873,8 @@ export async function buildRewriteDocsPrompt( .filter(t => t.status !== "complete" && t.status !== "done") .map(t => ({ id: t.id })); } - } catch (err) { /* fall through */ - process.stderr.write(`gsd [auto-prompts]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + } catch (err) { + logWarning("prompt", `buildRewriteDocsPrompt DB task lookup failed: ${err instanceof Error ? err.message : String(err)}`); } if (!incompleteTasks) { diff --git a/src/resources/extensions/gsd/auto-recovery.ts b/src/resources/extensions/gsd/auto-recovery.ts index 5877d3674..15f3e32c4 100644 --- a/src/resources/extensions/gsd/auto-recovery.ts +++ b/src/resources/extensions/gsd/auto-recovery.ts @@ -15,6 +15,7 @@ import { parseRoadmap as parseLegacyRoadmap, parsePlan as parseLegacyPlan } from import { isDbAvailable, getTask, getSlice, getSliceTasks, updateTaskStatus } from "./gsd-db.js"; import { isValidationTerminal } from "./state.js"; import { getErrorMessage } from "./error-utils.js"; +import { logWarning, logError } from "./workflow-logger.js"; import { nativeConflictFiles, nativeCommit, @@ -72,7 +73,8 @@ export function hasImplementationArtifacts(basePath: string): boolean { stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8", }); - } catch { + } catch (e) { + logWarning("recovery", `git rev-parse check failed: ${(e as Error).message}`); return true; } @@ -92,8 +94,9 @@ export function hasImplementationArtifacts(basePath: string): boolean { // implementation code (#1703). 
const implFiles = changedFiles.filter(f => !f.startsWith(".gsd/") && !f.startsWith(".gsd\\")); return implFiles.length > 0; - } catch { + } catch (e) { // Non-fatal — if git operations fail, don't block the pipeline + logWarning("recovery", `implementation artifact check failed: ${(e as Error).message}`); return true; } } @@ -111,7 +114,7 @@ function detectMainBranch(basePath: string): string { if (result.trim()) return "main"; } catch (err) { // main doesn't exist - process.stderr.write(`gsd [auto-recovery]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `main branch not found: ${err instanceof Error ? err.message : String(err)}`); } try { const result = execFileSync("git", ["rev-parse", "--verify", "master"], { @@ -122,7 +125,7 @@ function detectMainBranch(basePath: string): string { if (result.trim()) return "master"; } catch (err) { // master doesn't exist either - process.stderr.write(`gsd [auto-recovery]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `master branch not found: ${err instanceof Error ? err.message : String(err)}`); } return "main"; // default fallback } @@ -148,7 +151,7 @@ function getChangedFilesSinceBranch(basePath: string, targetBranch: string): str } } catch (err) { // merge-base failed — fall back - process.stderr.write(`gsd [auto-recovery]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `merge-base detection failed: ${err instanceof Error ? err.message : String(err)}`); } // Fallback: check last 20 commits @@ -158,7 +161,8 @@ function getChangedFilesSinceBranch(basePath: string, targetBranch: string): str { cwd: basePath, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }, ).trim(); return result ? 
[...new Set(result.split("\n").filter(Boolean))] : []; - } catch { + } catch (e) { + logWarning("recovery", `git log fallback failed: ${(e as Error).message}`); return []; } } @@ -251,7 +255,7 @@ export function verifyExpectedArtifact( } } catch (err) { // DB unavailable — treat as verified to avoid blocking - process.stderr.write(`gsd [auto-recovery]: dispatch failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `gate-evaluate DB check failed: ${err instanceof Error ? err.message : String(err)}`); } return true; } @@ -341,7 +345,7 @@ export function verifyExpectedArtifact( } } catch (err) { // Parse failure — don't block; slice plan may have non-standard format - process.stderr.write(`gsd [auto-recovery]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `plan-slice task plan verification failed: ${err instanceof Error ? err.message : String(err)}`); } } } @@ -371,7 +375,8 @@ export function verifyExpectedArtifact( const roadmap = parseLegacyRoadmap(roadmapContent); const slice = roadmap.slices.find((s) => s.id === sid); if (slice && !slice.done) return false; - } catch { + } catch (e) { + logWarning("recovery", `roadmap parse failed: ${(e as Error).message}`); return false; } } @@ -424,7 +429,7 @@ export function writeBlockerPlaceholder( const { milestone: mid, slice: sid, task: tid } = parseUnitId(unitId); if (mid && sid && tid) { try { updateTaskStatus(mid, sid, tid, "complete", new Date().toISOString()); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-recovery]: DB status update failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("recovery", `DB status update failed: ${err instanceof Error ? err.message : String(err)}`); } } } @@ -448,21 +453,21 @@ function abortAndResetMerge( nativeMergeAbort(basePath); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-recovery]: git merge-abort failed: ${err instanceof Error ? 
err.message : String(err)}\n`); + logWarning("recovery", `git merge-abort failed: ${err instanceof Error ? err.message : String(err)}`); } } else if (squashMsgPath) { try { unlinkSync(squashMsgPath); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-recovery]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("recovery", `file unlink failed: ${err instanceof Error ? err.message : String(err)}`); } } try { nativeResetHard(basePath); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-recovery]: git reset failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("recovery", `git reset failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -510,7 +515,8 @@ export function reconcileMergeState( try { nativeCheckoutTheirs(basePath, gsdConflicts); nativeAddPaths(basePath, gsdConflicts); - } catch { + } catch (e) { + logError("recovery", `auto-resolve .gsd/ conflicts failed: ${(e as Error).message}`); resolved = false; } if (resolved) { @@ -523,7 +529,8 @@ export function reconcileMergeState( `Auto-resolved ${gsdConflicts.length} .gsd/ state file conflict(s) from prior merge.`, "info", ); - } catch { + } catch (e) { + logError("recovery", `auto-commit .gsd/ conflict resolution failed: ${(e as Error).message}`); resolved = false; } } diff --git a/src/resources/extensions/gsd/auto-start.ts b/src/resources/extensions/gsd/auto-start.ts index d9c4c94d8..11e0a621e 100644 --- a/src/resources/extensions/gsd/auto-start.ts +++ b/src/resources/extensions/gsd/auto-start.ts @@ -66,6 +66,7 @@ import { isDebugEnabled, getDebugLogPath, } from "./debug-logger.js"; +import { logWarning, logError } from "./workflow-logger.js"; import { parseUnitId } from "./unit-id.js"; import type { AutoSession } from "./auto/session.js"; import { @@ -114,7 +115,7 @@ async function openProjectDbIfPresent(basePath: string): Promise { openDatabase(gsdDbPath); } catch (err) { /* non-fatal — DB lifecycle 
block below will retry */ - process.stderr.write(`gsd [auto-start]: DB open failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `DB open failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -216,7 +217,7 @@ export async function bootstrapAutoSession( nativeCommit(base, "chore: init gsd"); } catch (err) { /* nothing to commit */ - process.stderr.write(`gsd [auto-start]: mkdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `mkdir failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -577,9 +578,7 @@ export async function bootstrapAutoSession( migrateFromMarkdown(s.basePath); } } catch (err) { - process.stderr.write( - `gsd-migrate: auto-migration failed: ${(err as Error).message}\n`, - ); + logError("engine", `auto-migration failed: ${(err as Error).message}`); } } if (existsSync(gsdDbPath) && !isDbAvailable()) { @@ -587,9 +586,7 @@ export async function bootstrapAutoSession( const { openDatabase: openDb } = await import("./gsd-db.js"); openDb(gsdDbPath); } catch (err) { - process.stderr.write( - `gsd-db: failed to open existing database: ${(err as Error).message}\n`, - ); + logError("engine", `failed to open existing database: ${(err as Error).message}`); } } @@ -728,7 +725,7 @@ export async function bootstrapAutoSession( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-start]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `preflight validation failed: ${err instanceof Error ? 
err.message : String(err)}`); } return true; diff --git a/src/resources/extensions/gsd/auto-timers.ts b/src/resources/extensions/gsd/auto-timers.ts index ab34fa052..2b14365d0 100644 --- a/src/resources/extensions/gsd/auto-timers.ts +++ b/src/resources/extensions/gsd/auto-timers.ts @@ -24,6 +24,7 @@ import { saveActivityLog } from "./activity-log.js"; import { recoverTimedOutUnit, type RecoveryContext } from "./auto-timeout-recovery.js"; import { resolveAgentEndCancelled } from "./auto/resolve.js"; import type { AutoSession } from "./auto/session.js"; +import { logWarning, logError } from "./workflow-logger.js"; export interface SupervisionContext { s: AutoSession; @@ -101,7 +102,7 @@ export function startUnitSupervision(sctx: SupervisionContext): void { } } catch (err) { // Non-fatal — fall through with no estimate - process.stderr.write(`gsd [auto-timers]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("timer", `operation failed: ${err instanceof Error ? err.message : String(err)}`); } } const estimateMinutes = taskEstimate ? parseEstimateMinutes(taskEstimate) : null; @@ -215,13 +216,13 @@ export function startUnitSupervision(sctx: SupervisionContext): void { await pauseAuto(ctx, pi); } catch (err) { const message = err instanceof Error ? err.message : String(err); - console.error(`[idle-watchdog] Unhandled error: ${message}`); + logError("timer", `[idle-watchdog] Unhandled error: ${message}`); // Unblock any pending unit promise so the auto-loop is not orphaned. resolveAgentEndCancelled({ message: `Idle watchdog error: ${message}`, category: "idle", isTransient: true }); try { ctx.ui.notify(`Idle watchdog error: ${message}`, "warning"); } catch (err) { /* best effort */ - process.stderr.write(`gsd [auto-timers]: notification failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("timer", `notification failed: ${err instanceof Error ? 
err.message : String(err)}`); } } }, 15000); @@ -251,13 +252,13 @@ export function startUnitSupervision(sctx: SupervisionContext): void { await pauseAuto(ctx, pi); } catch (err) { const message = err instanceof Error ? err.message : String(err); - console.error(`[hard-timeout] Unhandled error: ${message}`); + logError("timer", `[hard-timeout] Unhandled error: ${message}`); // Unblock any pending unit promise so the auto-loop is not orphaned. resolveAgentEndCancelled({ message: `Hard timeout error: ${message}`, category: "timeout", isTransient: true }); try { ctx.ui.notify(`Hard timeout error: ${message}`, "warning"); } catch (err) { /* best effort */ - process.stderr.write(`gsd [auto-timers]: notification failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("timer", `notification failed: ${err instanceof Error ? err.message : String(err)}`); } } }, hardTimeoutMs); diff --git a/src/resources/extensions/gsd/auto-unit-closeout.ts b/src/resources/extensions/gsd/auto-unit-closeout.ts index 40c02bf01..ccd274176 100644 --- a/src/resources/extensions/gsd/auto-unit-closeout.ts +++ b/src/resources/extensions/gsd/auto-unit-closeout.ts @@ -7,6 +7,7 @@ import type { ExtensionContext } from "@gsd/pi-coding-agent"; import { snapshotUnitMetrics } from "./metrics.js"; import { saveActivityLog } from "./activity-log.js"; +import { logWarning } from "./workflow-logger.js"; export interface CloseoutOptions { promptCharCount?: number; @@ -38,11 +39,11 @@ export async function closeoutUnit( const llmCallFn = buildMemoryLLMCall(ctx); if (llmCallFn) { extractMemoriesFromUnit(activityFile, unitType, unitId, llmCallFn).catch((err) => { - if (process.env.GSD_DEBUG) console.error(`[gsd] memory extraction failed for ${unitType}/${unitId}:`, err); + logWarning("engine", `memory extraction failed for ${unitType}/${unitId}: ${(err as Error).message}`); }); } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-unit-closeout]: operation failed: ${err 
instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `operation failed: ${err instanceof Error ? err.message : String(err)}`); } } diff --git a/src/resources/extensions/gsd/auto-verification.ts b/src/resources/extensions/gsd/auto-verification.ts index 0312b60b8..e7f915a66 100644 --- a/src/resources/extensions/gsd/auto-verification.ts +++ b/src/resources/extensions/gsd/auto-verification.ts @@ -22,6 +22,7 @@ import { runDependencyAudit, } from "./verification-gate.js"; import { writeVerificationJSON } from "./verification-evidence.js"; +import { logWarning } from "./workflow-logger.js"; import type { AutoSession } from "./auto/session.js"; import { join } from "node:path"; @@ -159,9 +160,7 @@ export async function runPostUnitVerification( } } } catch (evidenceErr) { - process.stderr.write( - `verification-evidence: write error — ${(evidenceErr as Error).message}\n`, - ); + logWarning("engine", `verification-evidence write error: ${(evidenceErr as Error).message}`); } } @@ -217,9 +216,7 @@ export async function runPostUnitVerification( } } catch (err) { // Gate errors are non-fatal - process.stderr.write( - `verification-gate: error — ${(err as Error).message}\n`, - ); + logWarning("engine", `verification-gate error: ${(err as Error).message}`); return "continue"; } } diff --git a/src/resources/extensions/gsd/auto-worktree.ts b/src/resources/extensions/gsd/auto-worktree.ts index 66346a886..7cd49cd13 100644 --- a/src/resources/extensions/gsd/auto-worktree.ts +++ b/src/resources/extensions/gsd/auto-worktree.ts @@ -44,7 +44,7 @@ import { } from "./worktree.js"; import { MergeConflictError, readIntegrationBranch, RUNTIME_EXCLUSION_PATHS } from "./git-service.js"; import { debugLog } from "./debug-logger.js"; -import { logWarning } from "./workflow-logger.js"; +import { logWarning, logError } from "./workflow-logger.js"; import { loadEffectiveGSDPreferences } from "./preferences.js"; import { nativeGetCurrentBranch, @@ -100,7 +100,8 @@ const 
ROOT_STATE_FILES = [ function isSamePath(a: string, b: string): boolean { try { return realpathSync(a) === realpathSync(b); - } catch { + } catch (e) { + logWarning("worktree", `isSamePath failed: ${(e as Error).message}`); return false; } } @@ -155,17 +156,17 @@ function forceOverwriteAssessmentsWithVerdict( safeCopy(srcFile, join(dstSliceDir, fileEntry.name), { force: true }); } catch (err) { /* non-fatal per file */ - process.stderr.write(`gsd [auto-worktree]: mkdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `assessment force-copy failed: ${err instanceof Error ? err.message : String(err)}`); } } } catch (err) { /* non-fatal per slice */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `assessment slice scan failed: ${err instanceof Error ? err.message : String(err)}`); } } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `assessment sync failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -187,7 +188,7 @@ function clearProjectRootStateFiles(basePath: string, milestoneId: string): void unlinkSync(file); } catch (err) { /* non-fatal — file may not exist */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `file unlink failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -217,14 +218,14 @@ function clearProjectRootStateFiles(basePath: string, milestoneId: string): void unlinkSync(join(basePath, f)); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `untracked file unlink failed: ${err instanceof Error ? 
err.message : String(err)}`); } } } } } catch (err) { /* non-fatal — git command may fail if not in repo */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `untracked file cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } } } @@ -321,7 +322,7 @@ export function syncProjectRootToWorktree( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `worktree DB cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -390,7 +391,8 @@ export function readResourceVersion(): string | null { return typeof manifest?.gsdVersion === "string" ? manifest.gsdVersion : null; - } catch { + } catch (e) { + logWarning("worktree", `readResourceVersion failed: ${(e as Error).message}`); return null; } } @@ -456,8 +458,9 @@ export function escapeStaleWorktree(base: string): string { try { process.chdir(projectRoot); - } catch { + } catch (e) { // If chdir fails, return the original — caller will handle errors downstream + logWarning("worktree", `escapeStaleWorktree chdir failed: ${(e as Error).message}`); return base; } return projectRoot; @@ -489,13 +492,13 @@ export function cleanStaleRuntimeUnits( cleaned++; } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `stale runtime unit unlink failed (${file}): ${err instanceof Error ? err.message : String(err)}`); } } } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `stale runtime unit cleanup failed: ${err instanceof Error ? 
err.message : String(err)}`); } return cleaned; } @@ -539,7 +542,7 @@ export function syncGsdStateToWorktree( synced.push(f); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `file copy failed (${f}): ${err instanceof Error ? err.message : String(err)}`); } } } @@ -560,7 +563,7 @@ export function syncGsdStateToWorktree( synced.push(file); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `preferences copy failed (${file}): ${err instanceof Error ? err.message : String(err)}`); } break; } @@ -591,7 +594,7 @@ export function syncGsdStateToWorktree( synced.push(`milestones/${mid}/`); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone copy failed (${mid}): ${err instanceof Error ? err.message : String(err)}`); } } else { // Milestone directory exists but may be missing files (stale snapshot). @@ -612,7 +615,7 @@ export function syncGsdStateToWorktree( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone file copy failed (${mid}/${f}): ${err instanceof Error ? err.message : String(err)}`); } } } @@ -626,7 +629,7 @@ export function syncGsdStateToWorktree( synced.push(`milestones/${mid}/slices/`); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `slices copy failed (${mid}): ${err instanceof Error ? 
err.message : String(err)}`); } } else if (existsSync(srcSlicesDir) && existsSync(dstSlicesDir)) { // Both exist — sync missing slice directories @@ -644,20 +647,20 @@ export function syncGsdStateToWorktree( synced.push(`milestones/${mid}/slices/${sid}/`); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `slice copy failed (${mid}/${sid}): ${err instanceof Error ? err.message : String(err)}`); } } } } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone file sync failed: ${err instanceof Error ? err.message : String(err)}`); } } } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone directory sync failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -711,7 +714,7 @@ export function syncWorktreeStateBack( synced.push("gsd.db (pre-upgrade reconcile)"); } catch (err) { // Non-fatal — file sync below is the fallback - process.stderr.write(`gsd [auto-worktree]: DB reconciliation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `DB reconciliation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -730,7 +733,7 @@ export function syncWorktreeStateBack( synced.push(f); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `state file copy-back failed (${f}): ${err instanceof Error ? 
err.message : String(err)}`); } } } @@ -752,7 +755,7 @@ export function syncWorktreeStateBack( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone sync-back failed: ${err instanceof Error ? err.message : String(err)}`); } return { synced }; @@ -778,12 +781,12 @@ function syncDirFiles( synced.push(`${prefix}${entry.name}`); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `file copy failed (${prefix}${entry.name}): ${err instanceof Error ? err.message : String(err)}`); } } } catch (err) { /* non-fatal — srcDir may not be readable */ - process.stderr.write(`gsd [auto-worktree]: git push failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `directory read failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -828,7 +831,7 @@ function syncMilestoneDir( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: mkdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone slice sync failed (${mid}): ${err instanceof Error ? err.message : String(err)}`); } } // ─── Worktree Post-Create Hook (#597) ──────────────────────────────────────── @@ -861,7 +864,7 @@ export function runWorktreePostCreateHook( } if (process.platform === "win32") { try { resolved = realpathSync.native(resolved); } catch (err) { /* keep original */ - process.stderr.write(`gsd [auto-worktree]: realpath failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `realpath failed: ${err instanceof Error ? 
err.message : String(err)}`); } } @@ -948,7 +951,7 @@ function reconcilePlanCheckboxes( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: git push failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `walkMd directory read failed: ${err instanceof Error ? err.message : String(err)}`); } return results; } @@ -963,7 +966,8 @@ function reconcilePlanCheckboxes( try { srcContent = readFileSync(srcFile, "utf-8"); dstContent = readFileSync(dstFile, "utf-8"); - } catch { + } catch (e) { + logWarning("worktree", `reconcilePlanCheckboxes read failed: ${(e as Error).message}`); continue; } @@ -1000,7 +1004,7 @@ function reconcilePlanCheckboxes( atomicWriteSync(dstFile, updated, "utf-8"); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: file write failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `plan checkbox reconcile write failed: ${err instanceof Error ? err.message : String(err)}`); } } } @@ -1187,7 +1191,7 @@ export function teardownAutoWorktree( rmSync(wtDir, { recursive: true, force: true }); } catch (err) { // Non-fatal — the warning above tells the user how to clean up - process.stderr.write(`gsd [auto-worktree]: file removal failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `worktree directory removal failed: ${err instanceof Error ? err.message : String(err)}`); } } } @@ -1228,7 +1232,8 @@ export function getAutoWorktreePath( try { const content = readFileSync(gitPath, "utf8").trim(); if (!content.startsWith("gitdir: ")) return null; - } catch { + } catch (e) { + logWarning("worktree", `getAutoWorktreePath .git read failed: ${(e as Error).message}`); return null; } @@ -1389,7 +1394,7 @@ export function mergeMilestoneToMain( } } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto-worktree]: DB reconciliation failed: ${err instanceof Error ? 
err.message : String(err)}\n`); + logError("worktree", `DB reconciliation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -1547,7 +1552,7 @@ export function mergeMilestoneToMain( } catch (err) { // Stash failure is non-fatal — proceed without stash and let the merge // report the dirty tree if it fails. - process.stderr.write(`gsd [auto-worktree]: git stash failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git stash failed: ${err instanceof Error ? err.message : String(err)}`); } // 7a. Shelter queued milestone directories before the squash merge (#2505). @@ -1569,11 +1574,11 @@ export function mergeMilestoneToMain( mkdirSync(milestonesDir, { recursive: true }); cpSync(join(shelterDir, dirName), join(milestonesDir, dirName), { recursive: true, force: true }); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `shelter restore failed: ${err instanceof Error ? err.message : String(err)}`); } } try { rmSync(shelterDir, { recursive: true, force: true }); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: shelter cleanup failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `shelter cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } }; @@ -1593,13 +1598,13 @@ export function mergeMilestoneToMain( shelteredDirs.push(entry.name); } catch (err) { // Non-fatal — if shelter fails, the merge may still succeed - process.stderr.write(`gsd [auto-worktree]: file copy failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone shelter failed (${entry.name}): ${err instanceof Error ? 
err.message : String(err)}`); } } } } catch (err) { // Non-fatal — proceed with merge; untracked files may block it - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `milestone shelter operation failed: ${err instanceof Error ? err.message : String(err)}`); } // 7b. Clean up stale merge state before attempting squash merge (#2912). @@ -1614,7 +1619,7 @@ export function mergeMilestoneToMain( if (existsSync(p)) unlinkSync(p); } } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `merge state cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } // 8. Squash merge — auto-resolve .gsd/ state file conflicts (#530) @@ -1634,7 +1639,7 @@ export function mergeMilestoneToMain( if (existsSync(p)) unlinkSync(p); } } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `merge state cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } // Pop stash before throwing so local work is not lost. @@ -1646,7 +1651,7 @@ export function mergeMilestoneToMain( encoding: "utf-8", }); } catch (err) { /* stash pop conflict is non-fatal */ - process.stderr.write(`gsd [auto-worktree]: git stash failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git stash pop failed: ${err instanceof Error ? 
err.message : String(err)}`); } } restoreShelter(); @@ -1686,9 +1691,10 @@ export function mergeMilestoneToMain( try { nativeCheckoutTheirs(originalBasePath_, [safeFile]); nativeAddPaths(originalBasePath_, [safeFile]); - } catch { + } catch (e) { // If checkout --theirs fails, try removing the file from the merge // (it's a runtime file that shouldn't be committed anyway) + logWarning("worktree", `checkout --theirs failed for ${safeFile}, removing: ${(e as Error).message}`); nativeRmForce(originalBasePath_, [safeFile]); } } @@ -1700,7 +1706,7 @@ export function mergeMilestoneToMain( // libgit2's merge creates MERGE_HEAD even for squash merges; if left // dangling, subsequent merges fail and doctor reports corrupt state. try { nativeMergeAbort(originalBasePath_); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: git merge-abort failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `git merge-abort failed: ${err instanceof Error ? err.message : String(err)}`); } try { const gitDir_ = resolveGitDir(originalBasePath_); @@ -1709,7 +1715,7 @@ export function mergeMilestoneToMain( if (existsSync(p)) unlinkSync(p); } } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `merge state file cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } // Pop stash before throwing so local work is not lost (#2151). @@ -1721,7 +1727,7 @@ export function mergeMilestoneToMain( encoding: "utf-8", }); } catch (err) { /* stash pop conflict is non-fatal */ - process.stderr.write(`gsd [auto-worktree]: git stash failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git stash pop failed: ${err instanceof Error ? 
err.message : String(err)}`); } } restoreShelter(); @@ -1753,7 +1759,7 @@ export function mergeMilestoneToMain( if (existsSync(p)) unlinkSync(p); } } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto-worktree]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logError("worktree", `post-commit merge state cleanup failed: ${err instanceof Error ? err.message : String(err)}`); } // 9a-ii. Restore stashed files now that the merge+commit is complete (#2151). @@ -1767,7 +1773,8 @@ export function mergeMilestoneToMain( stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8", }); - } catch { + } catch (e) { + logWarning("worktree", `git stash pop failed, attempting conflict resolution: ${(e as Error).message}`); // Stash pop after squash merge can conflict on .gsd/ state files that // diverged between branches. Left unresolved, these UU entries block // every subsequent merge. Auto-resolve them the same way we handle @@ -1787,8 +1794,9 @@ export function mergeMilestoneToMain( encoding: "utf-8", }); nativeAddPaths(originalBasePath_, [f]); - } catch { + } catch (e) { // Last resort: remove the conflicted state file + logWarning("worktree", `checkout HEAD failed for ${f}, removing: ${(e as Error).message}`); nativeRmForce(originalBasePath_, [f]); } } @@ -1803,7 +1811,7 @@ export function mergeMilestoneToMain( encoding: "utf-8", }); } catch (err) { /* stash may already be consumed */ - process.stderr.write(`gsd [auto-worktree]: git stash failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git stash drop failed: ${err instanceof Error ? 
err.message : String(err)}`); } } else { // Non-.gsd conflicts remain — leave stash for manual resolution @@ -1857,8 +1865,9 @@ export function mergeMilestoneToMain( codeFilesChanged = mergedFiles.some( (entry) => !entry.path.startsWith(".gsd/"), ); - } catch { + } catch (e) { // If HEAD~1 doesn't exist (first commit), assume code was changed + logWarning("worktree", `diff numstat failed (assuming code changed): ${(e as Error).message}`); codeFilesChanged = true; } } @@ -1876,7 +1885,7 @@ export function mergeMilestoneToMain( pushed = true; } catch (err) { // Push failure is non-fatal - process.stderr.write(`gsd [auto-worktree]: git push failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git push failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -1907,7 +1916,7 @@ export function mergeMilestoneToMain( prCreated = true; } catch (err) { // PR creation failure is non-fatal — gh may not be installed or authenticated - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `PR creation failed: ${err instanceof Error ? err.message : String(err)}`); } } @@ -1947,7 +1956,7 @@ export function mergeMilestoneToMain( }); } catch (err) { // Best-effort -- worktree dir may already be gone - process.stderr.write(`gsd [auto-worktree]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `worktree removal failed: ${err instanceof Error ? err.message : String(err)}`); } // 13. Delete milestone branch (after worktree removal so ref is unlocked) @@ -1955,7 +1964,7 @@ export function mergeMilestoneToMain( nativeBranchDelete(originalBasePath_, milestoneBranch); } catch (err) { // Best-effort - process.stderr.write(`gsd [auto-worktree]: git branch-delete failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("worktree", `git branch-delete failed: ${err instanceof Error ? 
err.message : String(err)}`); } // 14. Clear module state diff --git a/src/resources/extensions/gsd/auto.ts b/src/resources/extensions/gsd/auto.ts index ab00160bc..4fa51648e 100644 --- a/src/resources/extensions/gsd/auto.ts +++ b/src/resources/extensions/gsd/auto.ts @@ -115,7 +115,7 @@ import { formatCost, formatTokenCount, } from "./metrics.js"; -import { setLogBasePath } from "./workflow-logger.js"; +import { setLogBasePath, logWarning, logError } from "./workflow-logger.js"; import { join } from "node:path"; import { readFileSync, existsSync, mkdirSync, writeFileSync, unlinkSync } from "node:fs"; import { atomicWriteSync } from "./atomic-write.js"; @@ -318,7 +318,7 @@ export function getAutoDashboardData(): AutoDashboardData { } } catch (err) { // Non-fatal — captures module may not be loaded - process.stderr.write(`gsd [auto]: capture count failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `capture count failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } return { active: s.active, @@ -568,7 +568,7 @@ function cleanupAfterLoopExit(ctx: ExtensionContext): void { if (lockBase()) releaseSessionLock(lockBase()); } catch (err) { /* best-effort — mirror stopAuto cleanup */ - process.stderr.write(`gsd [auto]: lock cleanup failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("session", `lock cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } ctx.ui.setStatus("gsd-auto", undefined); @@ -582,7 +582,7 @@ function cleanupAfterLoopExit(ctx: ExtensionContext): void { process.chdir(s.basePath); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto]: chdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `chdir failed: ${err instanceof Error ? 
err.message : String(err)}`, { file: "auto.ts" }); } } } @@ -656,7 +656,7 @@ export async function stopAuto( } } catch (err) { // Non-fatal — fall through to preserveBranch path - process.stderr.write(`gsd [auto]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `milestone summary check failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } if (milestoneComplete) { @@ -693,7 +693,7 @@ export async function stopAuto( process.chdir(s.basePath); } catch (err) { /* best-effort */ - process.stderr.write(`gsd [auto]: chdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `chdir failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } } } catch (e) { @@ -766,7 +766,7 @@ export async function stopAuto( const pausedPath = join(gsdRoot(s.originalBasePath || s.basePath), "runtime", "paused-session.json"); if (existsSync(pausedPath)) unlinkSync(pausedPath); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto]: file unlink failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `file unlink failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } // ── Step 13: Restore original model (before reset clears IDs) ── @@ -802,7 +802,7 @@ export async function stopAuto( await closeBrowser(); } } catch (err) { /* non-fatal: browser-tools may not be loaded */ - process.stderr.write(`gsd [auto]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `browser teardown failed: ${err instanceof Error ? 
err.message : String(err)}`, { file: "auto.ts" }); } // External cleanup (not covered by session reset) @@ -863,7 +863,7 @@ export async function pauseAuto( ); } catch (err) { // Non-fatal — resume will still work via full bootstrap, just without worktree context - process.stderr.write(`gsd [auto]: file write failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `paused-session file write failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } // Close out the current unit so its runtime record doesn't stay at "dispatched" @@ -872,7 +872,7 @@ export async function pauseAuto( await closeoutUnit(ctx, s.basePath, s.currentUnit.type, s.currentUnit.id, s.currentUnit.startedAt); } catch (err) { // Non-fatal — best-effort closeout on pause - process.stderr.write(`gsd [auto]: dispatch failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `unit closeout on pause failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } s.currentUnit = null; } @@ -1097,7 +1097,7 @@ export async function startAuto( s.stepMode = meta.stepMode ?? requestedStepMode; s.paused = true; try { unlinkSync(pausedPath); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto]: pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } ctx.ui.notify( `Resuming paused custom workflow${meta.activeRunDir ? ` (${meta.activeRunDir})` : ""}.`, @@ -1110,7 +1110,7 @@ export async function startAuto( if (!mDir || summaryFile) { // Stale milestone — clean up and fall through to fresh bootstrap try { unlinkSync(pausedPath); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto]: pause file cleanup failed: ${err instanceof Error ? 
err.message : String(err)}\n`); + logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } ctx.ui.notify( `Paused milestone ${meta.milestoneId} is ${!mDir ? "missing" : "already complete"}. Starting fresh.`, @@ -1123,7 +1123,7 @@ export async function startAuto( s.paused = true; // Clean up the persisted file — we're consuming it try { unlinkSync(pausedPath); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto]: pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("session", `pause file cleanup failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } ctx.ui.notify( `Resuming paused session for ${meta.milestoneId}${meta.worktreePath ? ` (worktree)` : ""}.`, @@ -1134,7 +1134,7 @@ export async function startAuto( } } catch (err) { // Malformed or missing — proceed with fresh bootstrap - process.stderr.write(`gsd [auto]: operation failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("session", `paused-session restore failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } } @@ -1262,7 +1262,7 @@ export async function startAuto( syncCmuxSidebar(loadEffectiveGSDPreferences()?.preferences, await deriveState(s.basePath)); } catch (err) { // Best-effort only — sidebar sync must never block auto-mode startup - process.stderr.write(`gsd [auto]: cmux sync failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `cmux sync failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } logCmuxEvent(loadEffectiveGSDPreferences()?.preferences, requestedStepMode ? "Step-mode started." : "Auto-mode started.", "progress"); @@ -1436,7 +1436,7 @@ export async function dispatchHookUnit( await pi.setModel(match); } catch (err) { /* non-fatal */ - process.stderr.write(`gsd [auto]: dispatch failed: ${err instanceof Error ? 
err.message : String(err)}\n`); + logWarning("dispatch", `hook model set failed: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } } else { ctx.ui.notify( @@ -1474,7 +1474,7 @@ export async function dispatchHookUnit( // Ensure cwd matches basePath before hook dispatch (#1389) try { if (process.cwd() !== s.basePath) process.chdir(s.basePath); } catch (err) { - process.stderr.write(`gsd [auto]: chdir failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `chdir failed before hook dispatch: ${err instanceof Error ? err.message : String(err)}`, { file: "auto.ts" }); } debugLog("dispatchHookUnit", { diff --git a/src/resources/extensions/gsd/auto/phases.ts b/src/resources/extensions/gsd/auto/phases.ts index 297fe7e4c..a6f481066 100644 --- a/src/resources/extensions/gsd/auto/phases.ts +++ b/src/resources/extensions/gsd/auto/phases.ts @@ -1269,7 +1269,7 @@ export async function runUnitPhase( nextSteps: [], }); } catch (err) { /* non-fatal — anchor is advisory */ - process.stderr.write(`gsd [phases]: phase anchor failed: ${err instanceof Error ? err.message : String(err)}\n`); + logWarning("engine", `phase anchor failed: ${err instanceof Error ? 
err.message : String(err)}`); } } diff --git a/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts b/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts index 5e40359b7..ac8737d09 100644 --- a/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts +++ b/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts @@ -1,5 +1,6 @@ import type { ExtensionAPI, ExtensionContext } from "@gsd/pi-coding-agent"; +import { logWarning } from "../workflow-logger.js"; import { checkAutoStartAfterDiscuss } from "../guided-flow.js"; import { getAutoDashboardData, getAutoModeStartModel, isAutoActive, pauseAuto } from "../auto.js"; import { getNextFallbackModel, resolveModelWithFallbacksForUnit } from "../preferences.js"; @@ -85,7 +86,7 @@ export async function handleAgentEnd( } catch (err) { const message = err instanceof Error ? err.message : String(err); ctx.ui.notify(`Auto-mode error after empty-content abort: ${message}. Stopping auto-mode.`, "error"); - try { await pauseAuto(ctx, pi); } catch { /* best-effort */ } + try { await pauseAuto(ctx, pi); } catch (e) { logWarning("bootstrap", `pauseAuto failed after empty-content abort: ${(e as Error).message}`); } } return; } @@ -212,8 +213,8 @@ export async function handleAgentEnd( ctx.ui.notify(`Auto-mode error in agent_end handler: ${message}. 
Stopping auto-mode.`, "error"); try { await pauseAuto(ctx, pi); - } catch { - // best-effort + } catch (e) { + logWarning("bootstrap", `pauseAuto failed in agent_end handler: ${(e as Error).message}`); } } } diff --git a/src/resources/extensions/gsd/bootstrap/db-tools.ts b/src/resources/extensions/gsd/bootstrap/db-tools.ts index cded58943..83190af47 100644 --- a/src/resources/extensions/gsd/bootstrap/db-tools.ts +++ b/src/resources/extensions/gsd/bootstrap/db-tools.ts @@ -413,8 +413,8 @@ export function registerDbTools(pi: ExtensionAPI): void { try { const { insertMilestone } = await import("../gsd-db.js"); insertMilestone({ id: milestoneId, status: "queued" }); - } catch { - // Non-fatal — the safety-net in deriveStateFromDb will catch this + } catch (e) { + logError("tool", `insertMilestone failed for ${milestoneId}: ${(e as Error).message}`); } } diff --git a/src/resources/extensions/gsd/bootstrap/dynamic-tools.ts b/src/resources/extensions/gsd/bootstrap/dynamic-tools.ts index 79b5a9ae6..ec7cfba47 100644 --- a/src/resources/extensions/gsd/bootstrap/dynamic-tools.ts +++ b/src/resources/extensions/gsd/bootstrap/dynamic-tools.ts @@ -5,7 +5,7 @@ import type { ExtensionAPI } from "@gsd/pi-coding-agent"; import { createBashTool, createEditTool, createReadTool, createWriteTool } from "@gsd/pi-coding-agent"; import { DEFAULT_BASH_TIMEOUT_SECS } from "../constants.js"; -import { setLogBasePath } from "../workflow-logger.js"; +import { setLogBasePath, logWarning } from "../workflow-logger.js"; /** * Resolve the correct DB path for the current working directory. 
@@ -92,9 +92,7 @@ export async function ensureDbOpen(): Promise { const { migrateFromMarkdown } = await import("../md-importer.js"); migrateFromMarkdown(basePath); } catch (err) { - process.stderr.write( - `gsd-db: ensureDbOpen auto-migration failed: ${(err as Error).message}\n`, - ); + logWarning("bootstrap", `ensureDbOpen auto-migration failed: ${(err as Error).message}`); } } return opened; @@ -106,9 +104,7 @@ export async function ensureDbOpen(): Promise { return opened; } - process.stderr.write( - `gsd-db: ensureDbOpen failed — no .gsd directory found (resolvedPath=${resolveProjectRootDbPath(basePath)}, cwd=${basePath})\n`, - ); + logWarning("bootstrap", `ensureDbOpen failed — no .gsd directory found (resolvedPath=${resolveProjectRootDbPath(basePath)}, cwd=${basePath})`); return false; } catch (err) { const basePath = process.cwd(); @@ -117,9 +113,7 @@ export async function ensureDbOpen(): Promise { cwd: basePath, error: (err as Error).message ?? String(err), }; - process.stderr.write( - `gsd-db: ensureDbOpen failed — ${JSON.stringify(diagnostic)}\n`, - ); + logWarning("bootstrap", `ensureDbOpen failed — ${JSON.stringify(diagnostic)}`); return false; } } diff --git a/src/resources/extensions/gsd/bootstrap/journal-tools.ts b/src/resources/extensions/gsd/bootstrap/journal-tools.ts index 7262d0b6d..9a1aa9dec 100644 --- a/src/resources/extensions/gsd/bootstrap/journal-tools.ts +++ b/src/resources/extensions/gsd/bootstrap/journal-tools.ts @@ -2,6 +2,7 @@ import { Type } from "@sinclair/typebox"; import type { ExtensionAPI } from "@gsd/pi-coding-agent"; import { queryJournal } from "../journal.js"; +import { logWarning } from "../workflow-logger.js"; export function registerJournalTools(pi: ExtensionAPI): void { pi.registerTool({ @@ -51,7 +52,7 @@ export function registerJournalTools(pi: ExtensionAPI): void { }; } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); - process.stderr.write(`gsd-journal: gsd_journal_query tool failed: ${msg}\n`); + logWarning("tool", `gsd_journal_query tool failed: ${msg}`); return { content: [{ type: "text" as const, text: `Error querying journal: ${msg}` }], details: { operation: "journal_query", error: msg } as any, diff --git a/src/resources/extensions/gsd/bootstrap/system-context.ts b/src/resources/extensions/gsd/bootstrap/system-context.ts index 94930375a..bf0329257 100644 --- a/src/resources/extensions/gsd/bootstrap/system-context.ts +++ b/src/resources/extensions/gsd/bootstrap/system-context.ts @@ -4,6 +4,7 @@ import { join } from "node:path"; import type { ExtensionContext } from "@gsd/pi-coding-agent"; +import { logWarning } from "../workflow-logger.js"; import { debugTime } from "../debug-logger.js"; import { loadPrompt } from "../prompt-loader.js"; import { readForensicsMarker } from "../forensics.js"; @@ -83,8 +84,8 @@ export async function buildBeforeAgentStartResult( memoryBlock = `\n\n${formatted}`; } } - } catch { - // non-fatal + } catch (e) { + logWarning("bootstrap", `memory block fetch failed: ${(e as Error).message}`); } let newSkillsBlock = ""; @@ -111,8 +112,8 @@ export async function buildBeforeAgentStartResult( : rawContent; codebaseBlock = `\n\n[PROJECT CODEBASE — File structure and descriptions (generated ${generatedAt}, may be stale — run /gsd codebase update to refresh)]\n\n${content}`; } - } catch { - // skip + } catch (e) { + logWarning("bootstrap", `CODEBASE file read failed: ${(e as Error).message}`); } } @@ -158,8 +159,8 @@ export function loadKnowledgeBlock(gsdHomeDir: string, cwd: string): { block: st globalSizeKb = Buffer.byteLength(content, "utf-8") / 1024; globalKnowledge = content; } - } catch { - // skip + } catch (e) { + logWarning("bootstrap", `global knowledge file read failed: ${(e as Error).message}`); } } @@ -170,8 +171,8 @@ export function loadKnowledgeBlock(gsdHomeDir: string, cwd: string): { block: st try { const 
content = readFileSync(knowledgePath, "utf-8").trim(); if (content) projectKnowledge = content; - } catch { - // skip + } catch (e) { + logWarning("bootstrap", `project knowledge file read failed: ${(e as Error).message}`); } } @@ -429,8 +430,8 @@ export function clearForensicsMarker(basePath: string): void { if (existsSync(markerPath)) { try { unlinkSync(markerPath); - } catch { - // non-fatal + } catch (e) { + logWarning("bootstrap", `unlinkSync forensics marker failed: ${(e as Error).message}`); } } } diff --git a/src/resources/extensions/gsd/commands-inspect.ts b/src/resources/extensions/gsd/commands-inspect.ts index 87eb494b1..5421c00bf 100644 --- a/src/resources/extensions/gsd/commands-inspect.ts +++ b/src/resources/extensions/gsd/commands-inspect.ts @@ -8,6 +8,7 @@ import type { ExtensionCommandContext } from "@gsd/pi-coding-agent"; import { existsSync } from "node:fs"; import { join } from "node:path"; import { gsdRoot } from "./paths.js"; +import { logWarning } from "./workflow-logger.js"; import { getErrorMessage } from "./error-utils.js"; export interface InspectData { @@ -92,7 +93,7 @@ export async function handleInspect(ctx: ExtensionCommandContext): Promise ctx.ui.notify(formatInspectOutput(data), "info"); } catch (err) { - process.stderr.write(`gsd-db: /gsd inspect failed: ${getErrorMessage(err)}\n`); + logWarning("command", `/gsd inspect failed: ${getErrorMessage(err)}`); ctx.ui.notify("Failed to inspect GSD database. 
Check stderr for details.", "error"); } } diff --git a/src/resources/extensions/gsd/commands-maintenance.ts b/src/resources/extensions/gsd/commands-maintenance.ts index d2661a605..09d9df9dc 100644 --- a/src/resources/extensions/gsd/commands-maintenance.ts +++ b/src/resources/extensions/gsd/commands-maintenance.ts @@ -7,12 +7,14 @@ import type { ExtensionCommandContext } from "@gsd/pi-coding-agent"; import { deriveState } from "./state.js"; import { nativeBranchList, nativeDetectMainBranch, nativeBranchListMerged, nativeBranchDelete, nativeForEachRef, nativeUpdateRef } from "./native-git-bridge.js"; +import { logWarning } from "./workflow-logger.js"; export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePath: string): Promise { let branches: string[]; try { branches = nativeBranchList(basePath, "gsd/*"); - } catch { + } catch (e) { + logWarning("command", `branch list failed: ${(e as Error).message}`); ctx.ui.notify("No GSD branches to clean up.", "info"); return; } @@ -23,7 +25,8 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa let merged: string[]; try { merged = nativeBranchListMerged(basePath, mainBranch, "gsd/*"); - } catch { + } catch (e) { + logWarning("command", `merged branch list failed: ${(e as Error).message}`); merged = []; } @@ -33,8 +36,8 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa try { nativeBranchDelete(basePath, branch, false); deletedMerged++; - } catch { - /* skip branches that cannot be deleted */ + } catch (e) { + logWarning("command", `branch delete failed for ${branch}: ${(e as Error).message}`); } } @@ -66,7 +69,7 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa try { nativeBranchDelete(basePath, branch, true); deletedStaleMilestones++; - } catch { /* non-fatal */ } + } catch (e) { logWarning("command", `stale milestone branch delete failed for ${branch}: ${(e as Error).message}`); } continue; } } @@ -77,7 
+80,8 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa let roadmapContent: string | null = null; try { roadmapContent = await loadFile(roadmapPath); - } catch { + } catch (e) { + logWarning("command", `loadFile failed for ${roadmapPath}: ${(e as Error).message}`); roadmapContent = null; } if (!roadmapContent) continue; @@ -85,12 +89,12 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa try { nativeBranchDelete(basePath, branch, true); deletedStaleMilestones++; - } catch { - /* non-fatal */ + } catch (e) { + logWarning("command", `milestone branch delete failed for ${branch}: ${(e as Error).message}`); } } - } catch { - /* non-fatal */ + } catch (e) { + logWarning("command", `stale milestone cleanup failed: ${(e as Error).message}`); } const summary: string[] = []; @@ -122,7 +126,8 @@ export async function handleCleanupSnapshots(ctx: ExtensionCommandContext, baseP let refs: string[]; try { refs = nativeForEachRef(basePath, "refs/gsd/snapshots/"); - } catch { + } catch (e) { + logWarning("command", `snapshot ref list failed: ${(e as Error).message}`); ctx.ui.notify("No snapshot refs to clean up.", "info"); return; } @@ -147,8 +152,8 @@ export async function handleCleanupSnapshots(ctx: ExtensionCommandContext, baseP try { nativeUpdateRef(basePath, old); pruned++; - } catch { - /* skip individual failures */ + } catch (e) { + logWarning("command", `snapshot ref update failed for ${old}: ${(e as Error).message}`); } } } @@ -164,7 +169,8 @@ export async function handleCleanupWorktrees(ctx: ExtensionCommandContext, baseP let statuses; try { statuses = getAllWorktreeHealth(basePath); - } catch { + } catch (e) { + logWarning("command", `worktree health inspection failed: ${(e as Error).message}`); ctx.ui.notify("Failed to inspect worktrees.", "error"); return; } @@ -197,7 +203,8 @@ export async function handleCleanupWorktrees(ctx: ExtensionCommandContext, baseP removeWorktree(basePath, wt.name, { 
deleteBranch: true }); lines.push(` ✓ ${wt.name} removed (branch ${wt.branch} deleted)`); removed++; - } catch { + } catch (e) { + logWarning("command", `worktree removal failed for ${wt.name}: ${(e as Error).message}`); lines.push(` ✗ ${wt.name} failed to remove`); } } @@ -246,7 +253,7 @@ export async function handleSkip(unitArg: string, ctx: ExtensionCommandContext, if (fileExists(completedKeysFile)) { keys = JSON.parse(readFile(completedKeysFile, "utf-8")); } - } catch { /* start fresh */ } + } catch (e) { logWarning("command", `completed-units.json parse failed: ${(e as Error).message}`); } // Normalize: accept "execute-task/M001/S01/T03", "M001/S01/T03", or just "T03" let skipKey = unitArg; @@ -371,7 +378,8 @@ export async function handleCleanupProjects(args: string, ctx: ExtensionCommandC hashList = readdirSync(projectsDir, { withFileTypes: true }) .filter(e => e.isDirectory()) .map(e => e.name); - } catch { + } catch (e) { + logWarning("command", `readdir failed for project-state directory: ${(e as Error).message}`); ctx.ui.notify(`Failed to read project-state directory at ${projectsDir}.`, "error"); return; } @@ -454,7 +462,8 @@ export async function handleCleanupProjects(args: string, ctx: ExtensionCommandC try { fsRmSync(pathJoin(projectsDir, e.hash), { recursive: true, force: true }); removed++; - } catch { + } catch (err) { + logWarning("command", `project cleanup rm failed for ${e.hash}: ${(err as Error).message}`); failed.push(e.hash); } } @@ -529,7 +538,7 @@ export async function handleRecover(ctx: ExtensionCommandContext, basePath: stri ctx.ui.notify(lines.join("\n"), "success"); } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); - process.stderr.write(`gsd-recover: failed: ${msg}\n`); + logWarning("command", `recover failed: ${msg}`); ctx.ui.notify(`gsd recover failed: ${msg}`, "error"); } } diff --git a/src/resources/extensions/gsd/custom-verification.ts b/src/resources/extensions/gsd/custom-verification.ts index 4d60c507b..77d76d30e 100644 --- a/src/resources/extensions/gsd/custom-verification.ts +++ b/src/resources/extensions/gsd/custom-verification.ts @@ -17,6 +17,7 @@ * - The frozen DEFINITION.yaml on disk is the single source of truth for step policies. */ +import { logWarning } from "./workflow-logger.js"; import { readFileSync, existsSync, statSync } from "node:fs"; import { join, resolve, sep } from "node:path"; import { spawnSync } from "node:child_process"; @@ -130,8 +131,8 @@ function handleContentHeuristic( if (!new RegExp(verify.pattern).test(content)) { return "pause"; } - } catch { - // Invalid regex at runtime — treat as verification failure + } catch (e) { + logWarning("engine", `content-heuristic regex failed: ${(e as Error).message}`); return "pause"; } } diff --git a/src/resources/extensions/gsd/gsd-db.ts b/src/resources/extensions/gsd/gsd-db.ts index 4edea929b..18bc760ef 100644 --- a/src/resources/extensions/gsd/gsd-db.ts +++ b/src/resources/extensions/gsd/gsd-db.ts @@ -10,7 +10,7 @@ import { existsSync, copyFileSync, mkdirSync, realpathSync } from "node:fs"; import { dirname } from "node:path"; import type { Decision, Requirement, GateRow, GateId, GateScope, GateStatus, GateVerdict } from "./types.js"; import { GSDError, GSD_STALE_STATE } from "./errors.js"; -import { logError } from "./workflow-logger.js"; +import { logError, logWarning } from "./workflow-logger.js"; const _require = createRequire(import.meta.url); @@ -787,11 +787,11 @@ export function openDatabase(path: string): boolean { initSchema(adapter, fileBacked); process.stderr.write("gsd-db: recovered corrupt database via VACUUM\n"); } catch (retryErr) { - try { adapter.close(); 
} catch { /* swallow */ } + try { adapter.close(); } catch (e) { logWarning("db", `close after VACUUM failed: ${(e as Error).message}`); } throw retryErr; } } else { - try { adapter.close(); } catch { /* swallow */ } + try { adapter.close(); } catch (e) { logWarning("db", `close after open failure: ${(e as Error).message}`); } throw err; } } @@ -802,7 +802,7 @@ export function openDatabase(path: string): boolean { if (!_exitHandlerRegistered) { _exitHandlerRegistered = true; - process.on("exit", () => { try { closeDatabase(); } catch {} }); + process.on("exit", () => { try { closeDatabase(); } catch (e) { logWarning("db", `exit handler close failed: ${(e as Error).message}`); } }); } return true; @@ -812,16 +812,14 @@ export function closeDatabase(): void { if (currentDb) { try { currentDb.exec('PRAGMA wal_checkpoint(TRUNCATE)'); - } catch { /* non-fatal — best effort before close */ } + } catch (e) { logWarning("db", `WAL checkpoint failed: ${(e as Error).message}`); } try { // Incremental vacuum to reclaim space without blocking currentDb.exec('PRAGMA incremental_vacuum(64)'); - } catch { /* non-fatal */ } + } catch (e) { logWarning("db", `incremental vacuum failed: ${(e as Error).message}`); } try { currentDb.close(); - } catch { - // swallow close errors - } + } catch (e) { logWarning("db", `database close failed: ${(e as Error).message}`); } currentDb = null; currentPath = null; currentPid = 0; @@ -833,7 +831,7 @@ export function vacuumDatabase(): void { if (!currentDb) return; try { currentDb.exec('VACUUM'); - } catch { /* non-fatal */ } + } catch (e) { logWarning("db", `VACUUM failed: ${(e as Error).message}`); } } let _txDepth = 0; @@ -1038,7 +1036,7 @@ export function upsertRequirement(r: Requirement): void { export function clearArtifacts(): void { if (!currentDb) return; - try { currentDb.exec("DELETE FROM artifacts"); } catch { /* cache clear is best effort */ } + try { currentDb.exec("DELETE FROM artifacts"); } catch (e) { logWarning("db", 
`clearArtifacts failed: ${(e as Error).message}`); } } export function insertArtifact(a: { @@ -1801,7 +1799,7 @@ export function reconcileWorktreeDb( // ATTACHing a WAL-mode DB to itself corrupts the WAL (#2823). try { if (realpathSync(mainDbPath) === realpathSync(worktreeDbPath)) return zero; - } catch { /* path resolution failed — fall through to existing checks */ } + } catch (e) { logWarning("db", `realpathSync failed: ${(e as Error).message}`); } // Sanitize path: reject any characters that could break ATTACH syntax. // ATTACH DATABASE doesn't support parameterized paths in all providers, // so we use strict allowlist validation instead. @@ -1938,12 +1936,12 @@ export function reconcileWorktreeDb( adapter.exec("COMMIT"); } catch (txErr) { - try { adapter.exec("ROLLBACK"); } catch { /* best effort */ } + try { adapter.exec("ROLLBACK"); } catch (e) { logWarning("db", `rollback failed: ${(e as Error).message}`); } throw txErr; } return { ...merged, conflicts }; } finally { - try { adapter.exec("DETACH DATABASE wt"); } catch { /* best effort */ } + try { adapter.exec("DETACH DATABASE wt"); } catch (e) { logWarning("db", `detach worktree DB failed: ${(e as Error).message}`); } } } catch (err) { logError("db", "worktree DB reconciliation failed", { error: (err as Error).message }); diff --git a/src/resources/extensions/gsd/guided-flow.ts b/src/resources/extensions/gsd/guided-flow.ts index ca71b5095..6b5f0d30b 100644 --- a/src/resources/extensions/gsd/guided-flow.ts +++ b/src/resources/extensions/gsd/guided-flow.ts @@ -52,6 +52,7 @@ export { buildExistingMilestonesContext, } from "./guided-flow-queue.js"; import { getErrorMessage } from "./error-utils.js"; +import { logWarning } from "./workflow-logger.js"; // ─── ID Generation with Reservation ───────────────────────────────────────── @@ -180,7 +181,7 @@ export function checkAutoStartAfterDiscuss(): boolean { ); } } - } catch { /* non-fatal — PROJECT.md parsing failure shouldn't block auto-start */ } + } catch (e) { 
logWarning("guided", `PROJECT.md parsing failed: ${(e as Error).message}`); } } // Gate 4: Discussion manifest process verification (multi-milestone only) @@ -212,7 +213,7 @@ export function checkAutoStartAfterDiscuss(): boolean { ); } } - } catch { /* malformed manifest — warn but don't block */ } + } catch (e) { logWarning("guided", `discussion manifest verification failed: ${(e as Error).message}`); } } // Draft promotion cleanup: if a CONTEXT-DRAFT.md exists alongside the new @@ -220,16 +221,16 @@ export function checkAutoStartAfterDiscuss(): boolean { try { const draftFile = resolveMilestoneFile(basePath, milestoneId, "CONTEXT-DRAFT"); if (draftFile) unlinkSync(draftFile); - } catch { /* non-fatal — stale draft doesn't break anything, CONTEXT.md wins */ } + } catch (e) { logWarning("guided", `CONTEXT-DRAFT.md unlink failed: ${(e as Error).message}`); } // Cleanup: remove discussion manifest after auto-start (only needed during discussion) - try { unlinkSync(manifestPath); } catch { /* may not exist for single-milestone */ } + try { unlinkSync(manifestPath); } catch (e) { logWarning("guided", `manifest unlink failed: ${(e as Error).message}`); } pendingAutoStartMap.delete(basePath); ctx.ui.notify(`Milestone ${milestoneId} ready.`, "info"); startAuto(ctx, pi, basePath, false, { step }).catch((err) => { ctx.ui.notify(`Auto-start failed: ${getErrorMessage(err)}`, "error"); - if (process.env.GSD_DEBUG) console.error('[gsd] auto start error:', err); + logWarning("guided", `auto start error: ${getErrorMessage(err)}`); debugLog("auto-start-failed", { error: getErrorMessage(err) }); }); return true; @@ -895,8 +896,8 @@ function selfHealRuntimeRecords(basePath: string, ctx: ExtensionContext): { clea ctx.ui.notify(`Self-heal: cleared ${cleared} stale runtime record(s) from a previous session.`, "info"); } return { cleared }; - } catch { - // Non-fatal — self-heal should never block the wizard + } catch (e) { + logWarning("guided", `self-heal stale runtime records failed: 
${(e as Error).message}`); return { cleared: 0 }; } } @@ -1142,7 +1143,7 @@ export async function showSmartEntry( ); return; } - } catch { /* directory exists but unreadable — fall through to normal flow */ } + } catch (e) { logWarning("guided", `directory read failed: ${(e as Error).message}`); } } } diff --git a/src/resources/extensions/gsd/markdown-renderer.ts b/src/resources/extensions/gsd/markdown-renderer.ts index 5e9eda89b..6f3ab0f45 100644 --- a/src/resources/extensions/gsd/markdown-renderer.ts +++ b/src/resources/extensions/gsd/markdown-renderer.ts @@ -9,6 +9,7 @@ // parseRoadmap(), parsePlan(), parseSummary() in files.ts. import { readFileSync, existsSync, mkdirSync } from "node:fs"; +import { logWarning } from "./workflow-logger.js"; import { isClosedStatus } from "./status-guards.js"; import { join, relative } from "node:path"; import { createRequire } from "node:module"; @@ -93,9 +94,7 @@ function loadArtifactContent( try { content = readFileSync(absPath, "utf-8"); } catch { - process.stderr.write( - `markdown-renderer: cannot read file from disk: ${absPath}\n`, - ); + logWarning("renderer", `cannot read file from disk: ${absPath}`); return null; } @@ -111,9 +110,7 @@ function loadArtifactContent( }); } catch { // Non-fatal: we have the content, DB storage is best-effort - process.stderr.write( - `markdown-renderer: warning — failed to store disk fallback in DB: ${artifactPath}\n`, - ); + logWarning("renderer", `failed to store disk fallback in DB: ${artifactPath}`); } return content; @@ -146,9 +143,7 @@ async function writeAndStore( }); } catch { // Non-fatal: file is on disk, DB is best-effort - process.stderr.write( - `markdown-renderer: warning — failed to update artifact in DB: ${artifactPath}\n`, - ); + logWarning("renderer", `failed to update artifact in DB: ${artifactPath}`); } invalidateCaches(); @@ -806,7 +801,8 @@ export function detectStaleRenders(basePath: string): StaleEntry[] { try { const m = _require("./parsers-legacy.ts"); 
parseRoadmap = m.parseRoadmap; parsePlan = m.parsePlan; - } catch { + } catch (e) { + logWarning("renderer", `parsers-legacy.ts require failed, falling back to .js: ${(e as Error).message}`); const m = _require("./parsers-legacy.js"); parseRoadmap = m.parseRoadmap; parsePlan = m.parsePlan; } @@ -841,8 +837,8 @@ export function detectStaleRenders(basePath: string): StaleEntry[] { }); } } - } catch { - // Can't parse roadmap — skip silently + } catch (e) { + logWarning("renderer", `roadmap parse failed: ${(e as Error).message}`); } } @@ -874,8 +870,8 @@ export function detectStaleRenders(basePath: string): StaleEntry[] { }); } } - } catch { - // Can't parse plan — skip silently + } catch (e) { + logWarning("renderer", `plan parse failed: ${(e as Error).message}`); } } @@ -1025,9 +1021,7 @@ export async function repairStaleRenders(basePath: string): Promise { } } } catch (err) { - process.stderr.write( - `markdown-renderer: repair failed for ${entry.path}: ${(err as Error).message}\n`, - ); + logWarning("renderer", `repair failed for ${entry.path}: ${(err as Error).message}`); } } diff --git a/src/resources/extensions/gsd/md-importer.ts b/src/resources/extensions/gsd/md-importer.ts index f0ba20231..dfea9ad7c 100644 --- a/src/resources/extensions/gsd/md-importer.ts +++ b/src/resources/extensions/gsd/md-importer.ts @@ -31,6 +31,7 @@ import { import { findMilestoneIds } from './guided-flow.js'; import { parseRoadmap, parsePlan } from './parsers-legacy.js'; import { parseContextDependsOn } from './files.js'; +import { logWarning } from './workflow-logger.js'; // ─── DECISIONS.md Parser ─────────────────────────────────────────────────── @@ -712,25 +713,25 @@ export function migrateFromMarkdown(gsdDir: string): { try { decisions = importDecisions(gsdDir); } catch (err) { - process.stderr.write(`gsd-migrate: skipping decisions import: ${(err as Error).message}\n`); + logWarning("migration", `skipping decisions import: ${(err as Error).message}`); } try { requirements = 
importRequirements(gsdDir); } catch (err) { - process.stderr.write(`gsd-migrate: skipping requirements import: ${(err as Error).message}\n`); + logWarning("migration", `skipping requirements import: ${(err as Error).message}`); } try { artifacts = importHierarchyArtifacts(gsdDir); } catch (err) { - process.stderr.write(`gsd-migrate: skipping artifacts import: ${(err as Error).message}\n`); + logWarning("migration", `skipping artifacts import: ${(err as Error).message}`); } try { hierarchy = migrateHierarchyToDb(gsdDir); } catch (err) { - process.stderr.write(`gsd-migrate: skipping hierarchy migration: ${(err as Error).message}\n`); + logWarning("migration", `skipping hierarchy migration: ${(err as Error).message}`); } }); diff --git a/src/resources/extensions/gsd/milestone-actions.ts b/src/resources/extensions/gsd/milestone-actions.ts index 7615a1eb9..49102dc25 100644 --- a/src/resources/extensions/gsd/milestone-actions.ts +++ b/src/resources/extensions/gsd/milestone-actions.ts @@ -21,6 +21,7 @@ import { import { invalidateAllCaches } from "./cache.js"; import { loadQueueOrder, saveQueueOrder } from "./queue-order.js"; import { isDbAvailable, updateMilestoneStatus } from "./gsd-db.js"; +import { logWarning } from "./workflow-logger.js"; // ─── Park ────────────────────────────────────────────────────────────────── @@ -58,7 +59,7 @@ export function parkMilestone(basePath: string, milestoneId: string, reason: str try { updateMilestoneStatus(milestoneId, "parked"); } catch (err) { - process.stderr.write(`gsd: parkMilestone DB sync failed for ${milestoneId}: ${(err as Error).message}\n`); + logWarning("engine", `parkMilestone DB sync failed for ${milestoneId}: ${(err as Error).message}`); } } invalidateAllCaches(); @@ -84,7 +85,7 @@ export function unparkMilestone(basePath: string, milestoneId: string): boolean try { updateMilestoneStatus(milestoneId, "active"); } catch (err) { - process.stderr.write(`gsd: unparkMilestone DB sync failed for ${milestoneId}: ${(err as 
Error).message}\n`); + logWarning("engine", `unparkMilestone DB sync failed for ${milestoneId}: ${(err as Error).message}`); } } invalidateAllCaches(); diff --git a/src/resources/extensions/gsd/milestone-ids.ts b/src/resources/extensions/gsd/milestone-ids.ts index aa44c8f87..3d6d9592d 100644 --- a/src/resources/extensions/gsd/milestone-ids.ts +++ b/src/resources/extensions/gsd/milestone-ids.ts @@ -6,6 +6,7 @@ */ import { randomInt } from "node:crypto"; +import { logWarning } from "./workflow-logger.js"; import { readdirSync, existsSync } from "node:fs"; import { milestonesDir } from "./paths.js"; import { loadQueueOrder, sortByQueueOrder } from "./queue-order.js"; @@ -128,7 +129,7 @@ export function findMilestoneIds(basePath: string): string[] { } catch (err) { // Log why milestone scanning failed — silent [] here causes infinite loops (#456) if (existsSync(dir)) { - console.error(`[gsd] findMilestoneIds: .gsd/milestones/ exists but readdirSync failed — ${getErrorMessage(err)}`); + logWarning("engine", `findMilestoneIds: .gsd/milestones/ exists but readdirSync failed — ${getErrorMessage(err)}`); } return []; } diff --git a/src/resources/extensions/gsd/parallel-merge.ts b/src/resources/extensions/gsd/parallel-merge.ts index e777a5a35..09a179869 100644 --- a/src/resources/extensions/gsd/parallel-merge.ts +++ b/src/resources/extensions/gsd/parallel-merge.ts @@ -15,6 +15,7 @@ import { MergeConflictError } from "./git-service.js"; import { removeSessionStatus } from "./session-status-io.js"; import type { WorkerInfo } from "./parallel-orchestrator.js"; import { getErrorMessage } from "./error-utils.js"; +import { logWarning } from "./workflow-logger.js"; // ─── Types ───────────────────────────────────────────────────────────────── @@ -47,7 +48,8 @@ export function isMilestoneCompleteInWorktreeDb(basePath: string, mid: string): { timeout: 3000, encoding: "utf-8" }, ); return (result.stdout || "").trim() === "complete"; - } catch { + } catch (e) { + 
logWarning("parallel", `spawnSync milestone completion check failed for ${mid}: ${(e as Error).message}`); return false; } } @@ -65,8 +67,8 @@ function discoverDbCompletedMilestones(basePath: string): Set { completed.add(entry); } } - } catch { - // worktrees dir may not exist + } catch (e) { + logWarning("parallel", `readdirSync for completed set failed: ${(e as Error).message}`); } return completed; } diff --git a/src/resources/extensions/gsd/parallel-orchestrator.ts b/src/resources/extensions/gsd/parallel-orchestrator.ts index 95b87d738..37f21bb89 100644 --- a/src/resources/extensions/gsd/parallel-orchestrator.ts +++ b/src/resources/extensions/gsd/parallel-orchestrator.ts @@ -41,6 +41,7 @@ import { type ParallelCandidates, } from "./parallel-eligibility.js"; import { getErrorMessage } from "./error-utils.js"; +import { logWarning } from "./workflow-logger.js"; // ─── Types ───────────────────────────────────────────────────────────────── @@ -126,7 +127,7 @@ export function persistState(basePath: string): void { const tmp = dest + TMP_SUFFIX; writeFileSync(tmp, JSON.stringify(persisted, null, 2), "utf-8"); renameSync(tmp, dest); - } catch { /* non-fatal */ } + } catch (e) { logWarning("parallel", `persist parallel state failed: ${(e as Error).message}`); } } /** @@ -136,7 +137,7 @@ function removeStateFile(basePath: string): void { try { const p = stateFilePath(basePath); if (existsSync(p)) unlinkSync(p); - } catch { /* non-fatal */ } + } catch (e) { logWarning("parallel", `clear parallel state file failed: ${(e as Error).message}`); } } function isPidAlive(pid: number): boolean { @@ -144,7 +145,8 @@ function isPidAlive(pid: number): boolean { try { process.kill(pid, 0); return true; - } catch { + } catch (e) { + logWarning("parallel", `pid alive check failed for pid ${pid}: ${(e as Error).message}`); return false; } } @@ -176,7 +178,8 @@ export function restoreState(basePath: string): PersistedState | null { } return persisted; - } catch { + } catch (e) { + 
logWarning("parallel", `readParallelState JSON parse failed: ${(e as Error).message}`); return null; } } @@ -190,8 +193,8 @@ function appendWorkerLog(basePath: string, milestoneId: string, chunk: string): const dir = join(gsdRoot(basePath), "parallel"); if (!existsSync(dir)) mkdirSync(dir, { recursive: true }); appendFileSync(workerLogPath(basePath, milestoneId), chunk, "utf-8"); - } catch { - // Non-fatal — diagnostics should never break orchestration. + } catch (e) { + logWarning("parallel", `appendFileSync worker log failed for ${milestoneId}: ${(e as Error).message}`); } } @@ -430,9 +433,8 @@ export async function startParallel( let wtPath: string; try { wtPath = createMilestoneWorktree(basePath, mid); - } catch { - // Worktree creation may fail in test environments or when git - // is not available. Fall back to a placeholder path. + } catch (e) { + logWarning("parallel", `createMilestoneWorktree fallback for ${mid}: ${(e as Error).message}`); wtPath = worktreePath(basePath, mid); } @@ -564,7 +566,8 @@ export function spawnWorker( stdio: ["ignore", "pipe", "pipe"], detached: false, }); - } catch { + } catch (e) { + logWarning("parallel", `spawnSync worker failed for ${milestoneId}: ${(e as Error).message}`); return false; } @@ -694,7 +697,8 @@ function resolveGsdBin(): string | null { let thisDir: string; try { thisDir = dirname(fileURLToPath(import.meta.url)); - } catch { + } catch (e) { + logWarning("parallel", `dirname(fileURLToPath) failed: ${(e as Error).message}`); thisDir = process.cwd(); } const candidates = [ @@ -721,8 +725,9 @@ function processWorkerLine(basePath: string, milestoneId: string, line: string): let event: Record; try { event = JSON.parse(line); - } catch { - return; // Not valid JSON — skip (stderr leakage, debug output, etc.) + } catch (e) { + logWarning("parallel", `JSON.parse(line) from worker output failed: ${(e as Error).message}`); + return; } const type = String(event.type ?? 
""); @@ -817,7 +822,7 @@ export async function stopParallel( } else if (worker.pid !== process.pid) { process.kill(worker.pid, "SIGTERM"); } - } catch { /* process may already be dead */ } + } catch (e) { logWarning("parallel", `process.kill SIGTERM failed for pid ${worker.pid}: ${(e as Error).message}`); } } // Wait for the headless process to cascade SIGTERM to its RPC child. @@ -833,7 +838,7 @@ export async function stopParallel( } else if (worker.pid !== process.pid) { process.kill(worker.pid, "SIGKILL"); } - } catch { /* process may already be dead */ } + } catch (e) { logWarning("parallel", `process.kill SIGKILL failed for pid ${worker.pid}: ${(e as Error).message}`); } await waitForWorkerExit(worker, 250); } diff --git a/src/resources/extensions/gsd/preferences.ts b/src/resources/extensions/gsd/preferences.ts index 71183cb0b..50a08a937 100644 --- a/src/resources/extensions/gsd/preferences.ts +++ b/src/resources/extensions/gsd/preferences.ts @@ -19,6 +19,7 @@ import { parse as parseYaml } from "yaml"; import type { PostUnitHookConfig, PreDispatchHookConfig, TokenProfile } from "./types.js"; import type { DynamicRoutingConfig } from "./model-router.js"; import { normalizeStringArray } from "../shared/format-utils.js"; +import { logWarning } from "./workflow-logger.js"; import { resolveProfileDefaults as _resolveProfileDefaults } from "./preferences-models.js"; import { @@ -237,7 +238,7 @@ function parseFrontmatterBlock(frontmatter: string): GSDPreferences { } return parsed as GSDPreferences; } catch (e) { - console.error("[parseFrontmatterBlock] YAML parse error:", e); + logWarning("guided", `YAML parse error in frontmatter block: ${(e as Error).message}`); return {} as GSDPreferences; } } @@ -296,8 +297,8 @@ function parseHeadingListFormat(content: string): GSDPreferences { } typed[targetSection] = value; - } catch { - /* malformed section — skip */ + } catch (e) { + logWarning("guided", `preferences section parse failed: ${(e as Error).message}`); } } diff 
--git a/src/resources/extensions/gsd/prompt-loader.ts b/src/resources/extensions/gsd/prompt-loader.ts index 2a92984a1..d2e2c4a5b 100644 --- a/src/resources/extensions/gsd/prompt-loader.ts +++ b/src/resources/extensions/gsd/prompt-loader.ts @@ -22,6 +22,7 @@ import { GSDError, GSD_PARSE_ERROR } from "./errors.js"; import { join, dirname } from "node:path"; import { fileURLToPath } from "node:url"; import { homedir } from "node:os"; +import { logWarning } from "./workflow-logger.js"; /** * Resolve the GSD extension directory. @@ -72,7 +73,7 @@ function warmCache(): void { // prompts/ may not exist in test environments — lazy loading still works. // Emit a diagnostic when running outside tests so wrong-path bugs are visible. if (!process.env.VITEST && !process.env.NODE_TEST) { - process.stderr.write(`[gsd:prompt-loader] warmCache: prompts dir not found: ${promptsDir}\n`); + logWarning("prompt", `warmCache: prompts dir not found: ${promptsDir}`); } } @@ -87,7 +88,7 @@ function warmCache(): void { } catch { // templates/ may not exist in test environments — lazy loading still works. if (!process.env.VITEST && !process.env.NODE_TEST) { - process.stderr.write(`[gsd:prompt-loader] warmCache: templates dir not found: ${templatesDir}\n`); + logWarning("prompt", `warmCache: templates dir not found: ${templatesDir}`); } } } diff --git a/src/resources/extensions/gsd/rule-registry.ts b/src/resources/extensions/gsd/rule-registry.ts index e61893606..7a697257a 100644 --- a/src/resources/extensions/gsd/rule-registry.ts +++ b/src/resources/extensions/gsd/rule-registry.ts @@ -6,6 +6,7 @@ // // A module-level singleton accessor allows existing code to migrate incrementally. 
+import { logWarning } from "./workflow-logger.js"; import type { UnifiedRule, RulePhase } from "./rule-types.js"; import type { DispatchAction, DispatchContext, DispatchRule } from "./auto-dispatch.js"; import type { @@ -387,8 +388,8 @@ export class RuleRegistry { const dir = join(basePath, ".gsd"); if (!existsSync(dir)) mkdirSync(dir, { recursive: true }); writeFileSync(this._hookStatePath(basePath), JSON.stringify(state, null, 2), "utf-8"); - } catch { - // Non-fatal — state is recreatable from artifacts + } catch (e) { + logWarning("registry", `failed to persist hook state: ${(e as Error).message}`); } } @@ -407,8 +408,8 @@ export class RuleRegistry { } } } - } catch { - // Non-fatal — fresh state is fine + } catch (e) { + logWarning("registry", `failed to restore hook state: ${(e as Error).message}`); } } @@ -423,8 +424,8 @@ export class RuleRegistry { "utf-8", ); } - } catch { - // Non-fatal + } catch (e) { + logWarning("registry", `failed to clear hook state: ${(e as Error).message}`); } } diff --git a/src/resources/extensions/gsd/safe-fs.ts b/src/resources/extensions/gsd/safe-fs.ts index 8872b8b28..3080c00be 100644 --- a/src/resources/extensions/gsd/safe-fs.ts +++ b/src/resources/extensions/gsd/safe-fs.ts @@ -1,23 +1,24 @@ import { existsSync, mkdirSync, cpSync, type CopySyncOptions } from "node:fs" import { dirname } from "node:path" +import { logWarning } from "./workflow-logger.js" /** * Safely creates a directory. Returns true if successful, false on error. - * Logs to stderr when GSD_DEBUG is set. + * Logs warnings via workflow-logger on failure. */ export function safeMkdir(dirPath: string): boolean { try { mkdirSync(dirPath, { recursive: true }) return true } catch (err) { - if (process.env.GSD_DEBUG) console.error(`[gsd] mkdir failed: ${dirPath}`, err) + logWarning("fs", `mkdir failed: ${dirPath}: ${(err as Error).message}`) return false } } /** * Safely copies src to dst. Returns true if successful, false if src doesn't exist or copy fails. 
- * Logs to stderr when GSD_DEBUG is set. + * Logs warnings via workflow-logger on failure. */ export function safeCopy(src: string, dst: string, opts?: CopySyncOptions): boolean { if (!existsSync(src)) return false @@ -25,7 +26,7 @@ export function safeCopy(src: string, dst: string, opts?: CopySyncOptions): bool cpSync(src, dst, opts) return true } catch (err) { - if (process.env.GSD_DEBUG) console.error(`[gsd] copy failed: ${src} → ${dst}`, err) + logWarning("fs", `copy failed: ${src} → ${dst}: ${(err as Error).message}`) return false } } @@ -41,7 +42,7 @@ export function safeCopyRecursive(src: string, dst: string, opts?: Omit l && !l.startsWith("//") && !l.startsWith("/*") && !l.startsWith("*") && l !== "}", ); @@ -90,16 +175,79 @@ function findEmptyCatches(filePath: string): Array<{ line: number; text: string return results; } -describe("auto-mode diagnostic catch blocks (#3348)", () => { - test("no empty catch blocks remain in auto-mode files", () => { - const files = getAutoModeFiles(); - assert.ok(files.length > 0, "should find auto-mode source files"); +/** + * Scan a file for catch blocks that use raw process.stderr.write or + * console.error/warn instead of workflow-logger. 
+ */ +function findRawStderrCatches(filePath: string): Array<{ line: number; text: string }> { + const content = readFileSync(filePath, "utf-8"); + const lines = content.split("\n"); + const results: Array<{ line: number; text: string }> = []; + + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + if (!/\}\s*catch\s*(\([^)]*\))?\s*\{/.test(line)) continue; + + // Inline single-line catch + const inlineMatch = line.match(/\}\s*catch\s*(\([^)]*\))?\s*\{(.*)\}\s*;?\s*$/); + if (inlineMatch) { + const body = inlineMatch[2]; + if (!LOGGER_PATTERNS.some((p) => p.test(body))) { + if (/process\.stderr\.write/.test(body) || /console\.(error|warn)/.test(body)) { + results.push({ line: i + 1, text: line.trim() }); + } + } + continue; + } + + // Multi-line catch + let j = i + 1; + let depth = 1; + const bodyLines: string[] = []; + while (j < lines.length && depth > 0) { + for (const ch of lines[j]) { + if (ch === "{") depth++; + else if (ch === "}") depth--; + } + bodyLines.push(lines[j]); + j++; + } + + const bodyText = bodyLines.slice(0, -1).join("\n"); + if (!LOGGER_PATTERNS.some((p) => p.test(bodyText))) { + if (/process\.stderr\.write/.test(bodyText) || /console\.(error|warn)/.test(bodyText)) { + results.push({ line: i + 1, text: line.trim() }); + } + } + } + + return results; +} + +describe("workflow-logger coverage (#3348)", () => { + test("no empty catch blocks remain in migrated files", () => { + // Combine auto-mode files + explicitly migrated files + const autoFiles = getAutoModeFiles(); + const allFiles = getGsdSourceFiles(); + const migratedPaths = new Set(autoFiles); + for (const file of allFiles) { + const rel = relative(gsdDir, file); + if (MIGRATED_FILES.has(rel)) { + migratedPaths.add(file); + } + } + + assert.ok(migratedPaths.size > 0, "should find migrated source files"); const violations: string[] = []; - for (const file of files) { + for (const file of migratedPaths) { + const rel = relative(gsdDir, file); + const basename = 
rel.split("/").pop()!; + // gsd-db.ts has intentionally silent provider probes + if (basename === "gsd-db.ts" || basename === "session-lock.ts") continue; + const empties = findEmptyCatches(file); for (const empty of empties) { - const rel = file.replace(gsdDir + "/", ""); violations.push(`${rel}:${empty.line} — ${empty.text}`); } } @@ -107,7 +255,30 @@ describe("auto-mode diagnostic catch blocks (#3348)", () => { assert.equal( violations.length, 0, - `Found ${violations.length} empty catch block(s) in auto-mode files:\n${violations.join("\n")}`, + `Found ${violations.length} empty catch block(s) in migrated files:\n${violations.join("\n")}`, + ); + }); + + test("catch blocks use workflow-logger instead of raw stderr/console", () => { + const files = getGsdSourceFiles(); + assert.ok(files.length > 0, "should find GSD source files"); + + const violations: string[] = []; + for (const file of files) { + const rel = relative(gsdDir, file); + const basename = rel.split("/").pop()!; + if (EXEMPT_FILES.has(basename)) continue; + + const issues = findRawStderrCatches(file); + for (const issue of issues) { + violations.push(`${rel}:${issue.line} — ${issue.text}`); + } + } + + assert.equal( + violations.length, + 0, + `Found ${violations.length} catch block(s) using raw stderr/console instead of workflow-logger:\n${violations.join("\n")}`, ); }); }); diff --git a/src/resources/extensions/gsd/tools/complete-milestone.ts b/src/resources/extensions/gsd/tools/complete-milestone.ts index 939e07883..054bcd856 100644 --- a/src/resources/extensions/gsd/tools/complete-milestone.ts +++ b/src/resources/extensions/gsd/tools/complete-milestone.ts @@ -23,6 +23,7 @@ import { invalidateStateCache } from "../state.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface CompleteMilestoneParams { 
milestoneId: string; @@ -191,9 +192,7 @@ export async function handleCompleteMilestone( await saveFile(summaryPath, summaryMd); } catch (renderErr) { // Disk render failed — roll back DB status so state stays consistent - process.stderr.write( - `gsd-db: complete_milestone — disk render failed, rolling back DB status: ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `complete_milestone — disk render failed, rolling back DB status: ${(renderErr as Error).message}`); updateMilestoneStatus(params.milestoneId, 'active', null); invalidateStateCache(); return { error: `disk render failed: ${(renderErr as Error).message}` }; @@ -217,9 +216,7 @@ export async function handleCompleteMilestone( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: complete-milestone post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `complete-milestone post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/complete-slice.ts b/src/resources/extensions/gsd/tools/complete-slice.ts index 759513319..6d0312cfe 100644 --- a/src/resources/extensions/gsd/tools/complete-slice.ts +++ b/src/resources/extensions/gsd/tools/complete-slice.ts @@ -30,6 +30,7 @@ import { renderRoadmapCheckboxes } from "../markdown-renderer.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface CompleteSliceResult { sliceId: string; @@ -297,9 +298,7 @@ export async function handleCompleteSlice( } } catch (renderErr) { // Disk render failed — roll back DB status so state stays consistent - process.stderr.write( - `gsd-db: complete_slice — disk render failed, rolling back DB status: ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `complete_slice — disk render failed, 
rolling back DB status: ${(renderErr as Error).message}`); updateSliceStatus(params.milestoneId, params.sliceId, 'pending'); invalidateStateCache(); return { error: `disk render failed: ${(renderErr as Error).message}` }; @@ -326,9 +325,7 @@ export async function handleCompleteSlice( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: complete-slice post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `complete-slice post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/complete-task.ts b/src/resources/extensions/gsd/tools/complete-task.ts index 8de2daa74..93f5bc4df 100644 --- a/src/resources/extensions/gsd/tools/complete-task.ts +++ b/src/resources/extensions/gsd/tools/complete-task.ts @@ -33,6 +33,7 @@ import { renderPlanCheckboxes } from "../markdown-renderer.js"; import { renderAllProjections, renderSummaryContent } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface CompleteTaskResult { taskId: string; @@ -210,9 +211,7 @@ export async function handleCompleteTask( } } catch (renderErr) { // Disk render failed — roll back DB status so state stays consistent - process.stderr.write( - `gsd-db: complete_task — disk render failed, rolling back DB status: ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `complete_task — disk render failed, rolling back DB status: ${(renderErr as Error).message}`); // Delete orphaned verification_evidence rows first (FK constraint // references tasks, so evidence must go before status change). // Without this, retries accumulate duplicate evidence rows (#2724). 
@@ -243,9 +242,7 @@ export async function handleCompleteTask( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: complete-task post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `complete-task post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/plan-milestone.ts b/src/resources/extensions/gsd/tools/plan-milestone.ts index 17c47c632..2c24ac009 100644 --- a/src/resources/extensions/gsd/tools/plan-milestone.ts +++ b/src/resources/extensions/gsd/tools/plan-milestone.ts @@ -15,6 +15,7 @@ import { renderRoadmapFromDb } from "../markdown-renderer.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface PlanMilestoneSliceInput { sliceId: string; @@ -269,9 +270,7 @@ export async function handlePlanMilestone( const renderResult = await renderRoadmapFromDb(basePath, params.milestoneId); roadmapPath = renderResult.roadmapPath; } catch (renderErr) { - process.stderr.write( - `gsd-db: plan_milestone — render failed (DB rows preserved for debugging): ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `plan_milestone — render failed (DB rows preserved for debugging): ${(renderErr as Error).message}`); invalidateStateCache(); return { error: `render failed: ${(renderErr as Error).message}` }; } @@ -292,9 +291,7 @@ export async function handlePlanMilestone( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: plan-milestone post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `plan-milestone post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/plan-slice.ts 
b/src/resources/extensions/gsd/tools/plan-slice.ts index fa345a975..c40467d47 100644 --- a/src/resources/extensions/gsd/tools/plan-slice.ts +++ b/src/resources/extensions/gsd/tools/plan-slice.ts @@ -16,6 +16,7 @@ import { renderPlanFromDb } from "../markdown-renderer.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface PlanSliceTaskInput { taskId: string; @@ -229,9 +230,7 @@ export async function handlePlanSlice( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: plan-slice post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `plan-slice post-mutation hook warning: ${(hookErr as Error).message}`); } return { @@ -241,9 +240,7 @@ export async function handlePlanSlice( taskPlanPaths: renderResult.taskPlanPaths, }; } catch (renderErr) { - process.stderr.write( - `gsd-db: plan_slice — render failed (DB rows preserved for debugging): ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `plan_slice — render failed (DB rows preserved for debugging): ${(renderErr as Error).message}`); invalidateStateCache(); return { error: `render failed: ${(renderErr as Error).message}` }; } diff --git a/src/resources/extensions/gsd/tools/plan-task.ts b/src/resources/extensions/gsd/tools/plan-task.ts index 57b91ae0a..329ab6cd4 100644 --- a/src/resources/extensions/gsd/tools/plan-task.ts +++ b/src/resources/extensions/gsd/tools/plan-task.ts @@ -7,6 +7,7 @@ import { renderTaskPlanFromDb } from "../markdown-renderer.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface PlanTaskParams { milestoneId: string; @@ -135,9 
+136,7 @@ export async function handlePlanTask( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: plan-task post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `plan-task post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/reassess-roadmap.ts b/src/resources/extensions/gsd/tools/reassess-roadmap.ts index 933fabec5..2abba6e55 100644 --- a/src/resources/extensions/gsd/tools/reassess-roadmap.ts +++ b/src/resources/extensions/gsd/tools/reassess-roadmap.ts @@ -19,6 +19,7 @@ import { renderRoadmapFromDb, renderAssessmentFromDb } from "../markdown-rendere import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface SliceChangeInput { sliceId: string; @@ -248,9 +249,8 @@ export async function handleReassessRoadmap( ); try { if (existsSync(validationFile)) unlinkSync(validationFile); - } catch { - // Best-effort: DB row is already deleted, so state derivation - // will not see the file-based verdict as authoritative. 
+ } catch (e) { + logWarning("tool", `validation file cleanup failed: ${(e as Error).message}`); } } @@ -271,9 +271,7 @@ export async function handleReassessRoadmap( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: reassess-roadmap post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `reassess-roadmap post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/reopen-slice.ts b/src/resources/extensions/gsd/tools/reopen-slice.ts index 9064167fd..517cf191a 100644 --- a/src/resources/extensions/gsd/tools/reopen-slice.ts +++ b/src/resources/extensions/gsd/tools/reopen-slice.ts @@ -24,6 +24,7 @@ import { isClosedStatus } from "../status-guards.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface ReopenSliceParams { milestoneId: string; @@ -113,9 +114,7 @@ export async function handleReopenSlice( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: reopen-slice post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `reopen-slice post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/reopen-task.ts b/src/resources/extensions/gsd/tools/reopen-task.ts index 5f5af1ddc..ed41eed08 100644 --- a/src/resources/extensions/gsd/tools/reopen-task.ts +++ b/src/resources/extensions/gsd/tools/reopen-task.ts @@ -22,6 +22,7 @@ import { isClosedStatus } from "../status-guards.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export 
interface ReopenTaskParams { milestoneId: string; @@ -117,9 +118,7 @@ export async function handleReopenTask( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: reopen-task post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `reopen-task post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/replan-slice.ts b/src/resources/extensions/gsd/tools/replan-slice.ts index b55dae238..9b323c79c 100644 --- a/src/resources/extensions/gsd/tools/replan-slice.ts +++ b/src/resources/extensions/gsd/tools/replan-slice.ts @@ -16,6 +16,7 @@ import { renderPlanFromDb, renderReplanFromDb } from "../markdown-renderer.js"; import { renderAllProjections } from "../workflow-projections.js"; import { writeManifest } from "../workflow-manifest.js"; import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; export interface ReplanSliceTaskInput { taskId: string; @@ -226,9 +227,7 @@ export async function handleReplanSlice( trigger_reason: params.triggerReason, }); } catch (hookErr) { - process.stderr.write( - `gsd: replan-slice post-mutation hook warning: ${(hookErr as Error).message}\n`, - ); + logWarning("tool", `replan-slice post-mutation hook warning: ${(hookErr as Error).message}`); } return { diff --git a/src/resources/extensions/gsd/tools/validate-milestone.ts b/src/resources/extensions/gsd/tools/validate-milestone.ts index 5e3f57ee4..b5e62acb9 100644 --- a/src/resources/extensions/gsd/tools/validate-milestone.ts +++ b/src/resources/extensions/gsd/tools/validate-milestone.ts @@ -22,6 +22,7 @@ import { saveFile, clearParseCache } from "../files.js"; import { invalidateStateCache } from "../state.js"; import { VALIDATION_VERDICTS, isValidMilestoneVerdict } from "../verdict-parser.js"; import { insertMilestoneValidationGates } from "../milestone-validation-gates.js"; +import { logWarning } from 
"../workflow-logger.js"; export interface ValidateMilestoneParams { milestoneId: string; @@ -137,9 +138,7 @@ export async function handleValidateMilestone( try { await saveFile(validationPath, validationMd); } catch (renderErr) { - process.stderr.write( - `gsd-db: validate_milestone — disk render failed, rolling back DB row: ${(renderErr as Error).message}\n`, - ); + logWarning("tool", `validate_milestone — disk render failed, rolling back DB row: ${(renderErr as Error).message}`); deleteAssessmentByScope(params.milestoneId, 'milestone-validation'); return { error: `disk render failed: ${(renderErr as Error).message}` }; } diff --git a/src/resources/extensions/gsd/workflow-events.ts b/src/resources/extensions/gsd/workflow-events.ts index 87bac5efb..c69006d22 100644 --- a/src/resources/extensions/gsd/workflow-events.ts +++ b/src/resources/extensions/gsd/workflow-events.ts @@ -2,6 +2,7 @@ import { createHash, randomUUID } from "node:crypto"; import { appendFileSync, readFileSync, existsSync, mkdirSync } from "node:fs"; import { join } from "node:path"; import { atomicWriteSync } from "./atomic-write.js"; +import { logWarning } from "./workflow-logger.js"; // ─── Session ID ─────────────────────────────────────────────────────────── @@ -74,7 +75,7 @@ export function readEvents(logPath: string): WorkflowEvent[] { try { events.push(JSON.parse(line) as WorkflowEvent); } catch { - process.stderr.write(`workflow-events: skipping corrupted event line: ${line.slice(0, 80)}\n`); + logWarning("event-log", `skipping corrupted event line: ${line.slice(0, 80)}`); } } diff --git a/src/resources/extensions/gsd/workflow-logger.ts b/src/resources/extensions/gsd/workflow-logger.ts index 882059302..fa61b403d 100644 --- a/src/resources/extensions/gsd/workflow-logger.ts +++ b/src/resources/extensions/gsd/workflow-logger.ts @@ -33,7 +33,20 @@ export type LogComponent = | "compaction" // Event compaction | "reconcile" // Worktree reconciliation | "db" // Database operations (gsd-db) - | 
"dispatch"; // Auto-dispatch rule evaluation + | "dispatch" // Auto-dispatch rule evaluation + | "recovery" // Auto-recovery and timeout recovery + | "session" // Session lock and session state I/O + | "prompt" // Prompt construction and context injection + | "dashboard" // Auto-dashboard rendering + | "timer" // Auto-timers (idle watchdog, hard timeout) + | "worktree" // Worktree lifecycle (create, sync, merge) + | "command" // Slash command execution and maintenance + | "parallel" // Parallel orchestrator and merge + | "fs" // Safe filesystem operations + | "bootstrap" // Extension bootstrap (system-context, agent-end) + | "guided" // Guided flow (discuss, plan wizards) + | "registry" // Rule registry hook state + | "renderer"; // Markdown renderer and projections export interface LogEntry { ts: string; diff --git a/src/resources/extensions/gsd/workflow-migration.ts b/src/resources/extensions/gsd/workflow-migration.ts index 4c8a9f071..7112e74b7 100644 --- a/src/resources/extensions/gsd/workflow-migration.ts +++ b/src/resources/extensions/gsd/workflow-migration.ts @@ -7,6 +7,7 @@ import { existsSync, readdirSync, readFileSync } from "node:fs"; import { join } from "node:path"; import { _getAdapter, transaction } from "./gsd-db.js"; import { parseRoadmap, parsePlan } from "./parsers-legacy.js"; +import { logWarning } from "./workflow-logger.js"; // ─── needsAutoMigration ─────────────────────────────────────────────────── @@ -23,8 +24,8 @@ export function needsAutoMigration(basePath: string): boolean { try { const row = db.prepare("SELECT COUNT(*) as cnt FROM milestones").get(); if (row && (row["cnt"] as number) > 0) return false; - } catch { - // Table might not exist yet — that's fine, we can still migrate + } catch (e) { + logWarning("migration", `DB probe failed: ${(e as Error).message}`); return false; } @@ -71,7 +72,7 @@ export function migrateFromMarkdown(basePath: string): void { .filter(e => e.isDirectory()) .map(e => e.name); } catch { - 
process.stderr.write("workflow-migration: failed to read milestones directory\n"); + logWarning("migration", "failed to read milestones directory"); return; } @@ -141,7 +142,7 @@ export function migrateFromMarkdown(basePath: string): void { risk: s.risk || "low", })); } catch (err) { - process.stderr.write(`workflow-migration: failed to parse ROADMAP.md for ${mId}: ${(err as Error).message}\n`); + logWarning("migration", `failed to parse ROADMAP.md for ${mId}: ${(err as Error).message}`); // Still add milestone with ID as title milestoneInserts.push({ id: mId, title: mId, status: milestoneStatus }); } @@ -191,7 +192,7 @@ export function migrateFromMarkdown(basePath: string): void { }); } } catch (err) { - process.stderr.write(`workflow-migration: failed to parse ${slice.id}-PLAN.md for ${mId}: ${(err as Error).message}\n`); + logWarning("migration", `failed to parse ${slice.id}-PLAN.md for ${mId}: ${(err as Error).message}`); } } } @@ -206,8 +207,8 @@ export function migrateFromMarkdown(basePath: string): void { process.stderr.write(`workflow-migration: orphaned summary file ${summaryFile} in ${mId} (slice not found in ROADMAP.md), skipping\n`); } } - } catch { - // Non-fatal + } catch (e) { + logWarning("migration", `Orphaned summary check failed for ${mId}: ${(e as Error).message}`); } } @@ -308,17 +309,18 @@ export function validateMigration(basePath: string): { discrepancies: string[] } const planContent = readFileSync(planPath, "utf-8"); const plan = parsePlan(planContent); mdTaskCount += plan.tasks.length; - } catch { - // Skip unreadable plan + } catch (e) { + logWarning("migration", `Failed to read plan ${slice.id}-PLAN.md: ${(e as Error).message}`); } } } - } catch { - // Skip unreadable roadmap + } catch (e) { + logWarning("migration", `Failed to read roadmap for ${mId}: ${(e as Error).message}`); } } } - } catch { + } catch (e) { + logWarning("migration", `Validation failed to read markdown: ${(e as Error).message}`); return { discrepancies: ["Failed to 
read markdown for validation"] }; } diff --git a/src/resources/extensions/gsd/workflow-projections.ts b/src/resources/extensions/gsd/workflow-projections.ts index 7a16c0e56..cc797a7ca 100644 --- a/src/resources/extensions/gsd/workflow-projections.ts +++ b/src/resources/extensions/gsd/workflow-projections.ts @@ -423,7 +423,7 @@ export function regenerateIfMissing( renderSummaryProjection(basePath, milestoneId, sliceId, task.id); regenerated++; } catch (err) { - console.error(`[projections] regenerateIfMissing SUMMARY failed for ${task.id}:`, err); + logWarning("projection", `regenerateIfMissing SUMMARY failed for ${task.id}: ${(err as Error).message}`); } } } @@ -452,7 +452,7 @@ export function regenerateIfMissing( } return true; } catch (err) { - console.error(`[projections] regenerateIfMissing ${fileType} failed:`, err); + logWarning("projection", `regenerateIfMissing ${fileType} failed: ${(err as Error).message}`); return false; } } diff --git a/src/resources/extensions/gsd/workflow-reconcile.ts b/src/resources/extensions/gsd/workflow-reconcile.ts index 216f1019a..25eb0f78f 100644 --- a/src/resources/extensions/gsd/workflow-reconcile.ts +++ b/src/resources/extensions/gsd/workflow-reconcile.ts @@ -455,8 +455,8 @@ function parseEventBlock(block: string): WorkflowEvent[] { if (paramsMatch) { try { params = JSON.parse(paramsMatch[1]!) 
as Record; - } catch { - // Keep empty params on parse error + } catch (e) { + logWarning("reconcile", `tool call params parse failed: ${(e as Error).message}`); } i++; // consume params line } diff --git a/src/resources/extensions/gsd/worktree-manager.ts b/src/resources/extensions/gsd/worktree-manager.ts index b929e02e7..80be36e41 100644 --- a/src/resources/extensions/gsd/worktree-manager.ts +++ b/src/resources/extensions/gsd/worktree-manager.ts @@ -95,8 +95,8 @@ export function resolveGitDir(basePath: string): string { if (content.startsWith("gitdir: ")) { return resolve(basePath, content.slice(8)); } - } catch { - // Not a file or unreadable — fall through to default + } catch (e) { + logWarning("worktree", `.git file read failed: ${(e as Error).message}`); } return join(basePath, ".git"); } @@ -308,8 +308,9 @@ export function findNestedGitDirs(rootPath: string): string[] { let entries: string[]; try { entries = readdirSync(dir); - } catch { - return; // Permission denied, broken symlink, etc. 
+ } catch (e) { + logWarning("worktree", `readdirSync failed: ${(e as Error).message}`); + return; } for (const entry of entries) { @@ -321,7 +322,8 @@ export function findNestedGitDirs(rootPath: string): string[] { let stat; try { stat = lstatSync(fullPath); - } catch { + } catch (e) { + logWarning("worktree", `lstatSync failed for ${fullPath}: ${(e as Error).message}`); continue; } if (!stat.isDirectory()) continue; @@ -337,8 +339,8 @@ export function findNestedGitDirs(rootPath: string): string[] { // Don't recurse into the nested repo — we found what we need continue; } - } catch { - // No .git here — continue scanning + } catch (e) { + logWarning("worktree", `existsSync/.git check failed for ${fullPath}: ${(e as Error).message}`); } walk(fullPath, depth + 1); @@ -374,7 +376,7 @@ export function removeWorktree( if (entry?.path) { wtPath = entry.path; } - } catch { /* fall back to computed path */ } + } catch (e) { logWarning("worktree", `nativeWorktreeList parse failed: ${(e as Error).message}`); } const resolvedWtPath = existsSync(wtPath) ? 
realpathSync(wtPath) : wtPath; @@ -388,7 +390,7 @@ export function removeWorktree( if (!existsSync(wtPath)) { nativeWorktreePrune(basePath); if (deleteBranch) { - try { nativeBranchDelete(basePath, branch, true); } catch { /* branch may not exist */ } + try { nativeBranchDelete(basePath, branch, true); } catch (e) { logWarning("worktree", `nativeBranchDelete failed: ${(e as Error).message}`); } } return; } @@ -422,8 +424,8 @@ export function removeWorktree( logWarning("reconcile", `Submodule changes detected — stash failed, changes may be lost during force removal`, { worktree: name, path: resolvedWtPath }); } } - } catch { - // submodule status failed — proceed with normal removal + } catch (e) { + logWarning("worktree", `submodule status check failed: ${(e as Error).message}`); } } @@ -454,11 +456,11 @@ export function removeWorktree( // Remove worktree: try non-force first when submodules have changes, // falling back to force only after submodule state has been preserved. const useForce = hasSubmoduleChanges ? false : force; - try { nativeWorktreeRemove(basePath, resolvedWtPath, useForce); } catch { /* may fail */ } + try { nativeWorktreeRemove(basePath, resolvedWtPath, useForce); } catch (e) { logWarning("worktree", `nativeWorktreeRemove failed: ${(e as Error).message}`); } // If the directory is still there (e.g. 
locked), try harder with force if (existsSync(resolvedWtPath)) { - try { nativeWorktreeRemove(basePath, resolvedWtPath, true); } catch { /* may fail */ } + try { nativeWorktreeRemove(basePath, resolvedWtPath, true); } catch (e) { logWarning("worktree", `nativeWorktreeRemove (force) failed: ${(e as Error).message}`); } } // (#2821) If the worktree directory STILL exists after both native removal @@ -488,7 +490,7 @@ export function removeWorktree( nativeWorktreePrune(basePath); if (deleteBranch) { - try { nativeBranchDelete(basePath, branch, true); } catch { /* branch may not exist */ } + try { nativeBranchDelete(basePath, branch, true); } catch (e) { logWarning("worktree", `final branch delete failed: ${(e as Error).message}`); } } }