From 0ece0e5413537af58f7095136737b1391fe0a04c Mon Sep 17 00:00:00 2001 From: Mikael Hugo Date: Mon, 11 May 2026 10:17:58 +0200 Subject: [PATCH] refactor(sf-ext): consolidate sfHome, counters, tool helpers, settings path, post-mutation hook - rf2-01: replace 23 inline `process.env.SF_HOME || join(homedir(), '.sf')` patterns across 19 files with canonical `sfHome()` from sf-home.js; removes 5 private sfHome/getSfHome function definitions and unused os/homedir imports - rf2-05: extract `ensureWritableParent` and `errorMessage` from complete-task.js and complete-slice.js into new tools/tool-helpers.js - rf2-06: add `runPostMutationHook` to tool-helpers.js; replace 8 identical try/catch blocks (plan-task, plan-slice, plan-milestone, replan-slice, reassess-roadmap, reopen-slice, reopen-task, reopen-milestone) with single call - rf2-09: add `makeDiskCounter` factory in auto-dispatch.js; consolidate 4 counter functions (rewrite/uat get/set/increment) from duplicated if/else DB-vs-disk logic into thin factory wrappers (~35 lines removed) - rf2-10: export `getSfAgentSettingsPath()` from preferences.js; update notifications/notify.js and permissions/permission-core.js to use it All 4375 unit tests pass. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- src/resources/extensions/sf/atomic-write.js | 2 +- src/resources/extensions/sf/auto-post-unit.js | 4 +- src/resources/extensions/sf/auto-prompts.js | 32 ++------ .../extensions/sf/auto-timeout-recovery.js | 4 +- src/resources/extensions/sf/auto-worktree.js | 7 +- src/resources/extensions/sf/auto/loop.js | 6 +- .../extensions/sf/bootstrap/crash-log.js | 7 +- .../extensions/sf/bootstrap/system-context.js | 29 +------ .../extensions/sf/commands-extensions.js | 7 +- src/resources/extensions/sf/commands-plan.js | 11 +-- .../extensions/sf/commands/catalog.js | 5 +- src/resources/extensions/sf/detection.js | 10 +-- src/resources/extensions/sf/forensics.js | 5 +- .../extensions/sf/memory-extractor.js | 4 +- .../extensions/sf/notifications/notify.js | 5 +- .../extensions/sf/onboarding-state.js | 5 +- .../extensions/sf/parallel-orchestrator.js | 3 +- src/resources/extensions/sf/paths.js | 5 +- .../sf/permissions/permission-core.js | 3 +- src/resources/extensions/sf/preferences.js | 13 +++ src/resources/extensions/sf/prompt-loader.js | 5 +- .../extensions/sf/prompt-validation.js | 7 +- src/resources/extensions/sf/repo-identity.js | 14 ++-- .../extensions/sf/schedule/schedule-store.js | 9 +-- src/resources/extensions/sf/self-feedback.js | 6 +- src/resources/extensions/sf/subagent/index.js | 3 +- .../extensions/sf/subagent/isolation.js | 5 +- .../extensions/sf/summary-helpers.js | 21 +---- src/resources/extensions/sf/sync-scheduler.js | 3 +- .../extensions/sf/tools/complete-slice.js | 13 +-- .../extensions/sf/tools/complete-task.js | 13 +-- .../extensions/sf/tools/plan-milestone.js | 29 +++---- .../extensions/sf/tools/plan-slice.js | 29 +++---- .../extensions/sf/tools/plan-task.js | 37 +++------ .../extensions/sf/tools/reassess-roadmap.js | 35 +++----- .../extensions/sf/tools/reopen-milestone.js | 46 ++++------- .../extensions/sf/tools/reopen-slice.js | 52 ++++-------- 
.../extensions/sf/tools/reopen-task.js | 58 +++++--------- .../extensions/sf/tools/replan-slice.js | 37 +++------ .../extensions/sf/tools/tool-helpers.js | 71 +++++++++++++++++ .../extensions/sf/uok/auto-dispatch.js | 79 ++++++++++--------- .../extensions/sf/uok/chaos-monkey.js | 13 ++- .../extensions/sf/upstream-bridge.js | 5 +- .../extensions/sf/workflow-helpers.js | 8 +- .../extensions/sf/workflow-install.js | 5 +- .../extensions/sf/workflow-plugins.js | 7 +- .../extensions/sf/workflow-templates.js | 5 +- src/resources/extensions/sf/worktree-root.js | 6 +- src/resources/extensions/sf/worktree.js | 8 +- 49 files changed, 335 insertions(+), 461 deletions(-) create mode 100644 src/resources/extensions/sf/tools/tool-helpers.js diff --git a/src/resources/extensions/sf/atomic-write.js b/src/resources/extensions/sf/atomic-write.js index 7c8a37392..8e59ce837 100644 --- a/src/resources/extensions/sf/atomic-write.js +++ b/src/resources/extensions/sf/atomic-write.js @@ -19,7 +19,7 @@ function computeRetryDelayMs(attempt) { const jitter = randomBytes(1)[0] % 5; return base + jitter; } -function delay(ms) { +export function delay(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } /** diff --git a/src/resources/extensions/sf/auto-post-unit.js b/src/resources/extensions/sf/auto-post-unit.js index 62be26013..dde6d296a 100644 --- a/src/resources/extensions/sf/auto-post-unit.js +++ b/src/resources/extensions/sf/auto-post-unit.js @@ -10,7 +10,9 @@ * * Extracted from handleAgentEnd() in auto.ts. 
*/ + import { detectAbandonMilestone } from "./abandon-detect.js"; +import { delay } from "./atomic-write.js"; import { resolveExpectedArtifactPath as resolveArtifactForContent } from "./auto-artifact-paths.js"; import { diagnoseExpectedArtifact, @@ -396,7 +398,7 @@ export async function postUnitPreVerification(pctx, opts) { invalidateAllCaches(); // Small delay to let files settle (skipped for sidecars where latency matters more) if (!opts?.skipSettleDelay) { - await new Promise((r) => setTimeout(r, 100)); + await delay(100); } const prefs = loadEffectiveSFPreferences()?.preferences; const uokFlags = resolveUokFlags(prefs); diff --git a/src/resources/extensions/sf/auto-prompts.js b/src/resources/extensions/sf/auto-prompts.js index 9fe24eae7..a9bf05c24 100644 --- a/src/resources/extensions/sf/auto-prompts.js +++ b/src/resources/extensions/sf/auto-prompts.js @@ -345,7 +345,7 @@ export function buildSourceFilePaths(base, mid, sid) { /** * Load and inline dependency slice summaries (full content, not just paths). */ -export async function inlineDependencySummaries(mid, sid, base, budgetChars) { +async function inlineDependencySummaries(mid, sid, base, budgetChars) { // DB primary path — get slice depends directly let depends = null; try { @@ -637,7 +637,7 @@ function extractKeywords(title) { * Queries DB memories table (primary); falls back to KNOWLEDGE.md file. * Returns null if no knowledge exists or no entries match. */ -export async function inlineKnowledgeScoped(base, keywords) { +async function inlineKnowledgeScoped(base, keywords) { try { const { isDbAvailable, getActiveMemories } = await import("./sf-db.js"); if (isDbAvailable()) { @@ -670,7 +670,7 @@ export async function inlineKnowledgeScoped(base, keywords) { * Caps the payload at `maxChars` (default 30,000 chars). * Returns null when no knowledge exists or no entries match any keyword. 
*/ -export async function inlineKnowledgeBudgeted(base, keywords, options) { +async function inlineKnowledgeBudgeted(base, keywords, options) { const DEFAULT_MAX_CHARS = 30_000; const HARD_MAX_CHARS = 100_000; const raw = Number(options?.maxChars ?? DEFAULT_MAX_CHARS); @@ -717,7 +717,7 @@ export async function inlineKnowledgeBudgeted(base, keywords, options) { * Reads full roadmap, extracts minimal excerpt with header + predecessor + target row. * Returns null if roadmap doesn't exist or slice not found. */ -export async function inlineRoadmapExcerpt(base, mid, sid) { +async function inlineRoadmapExcerpt(base, mid, sid) { const roadmapPath = resolveMilestoneFile(base, mid, "ROADMAP"); if (!roadmapPath || !existsSync(roadmapPath)) return null; const roadmapRel = relMilestoneFile(base, mid, "ROADMAP"); @@ -1000,23 +1000,10 @@ export function buildSkillDiscoveryVars() { }; } // ─── Text Helpers ────────────────────────────────────────────────────────── -export function extractMarkdownSection(content, heading) { - const match = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m").exec( - content, - ); - if (!match) return null; - const start = match.index + match[0].length; - const rest = content.slice(start); - const nextHeading = rest.match(/^##\s+/m); - const end = nextHeading?.index ?? rest.length; - return rest.slice(0, end).trim(); -} -export function escapeRegExp(value) { - return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); -} // Re-exported from workflow-helpers.js: // - buildResumeSection, buildCarryForwardSection // - checkNeedsReassessment, checkNeedsRunUat +// - escapeRegExp, extractMarkdownSection // ─── Prompt Builders ────────────────────────────────────────────────────── /** * Build a prompt for the workflow-preferences unit type (deep mode). @@ -1077,15 +1064,6 @@ export async function buildDiscussRequirementsPrompt( * ask_user_questions and writes .sf/runtime/research-decision.json. 
* Fires after discuss-requirements and before research-project-parallel. */ -export async function buildResearchDecisionPrompt( - base, - structuredQuestionsAvailable = "false", -) { - return loadPrompt("guided-research-decision", { - workingDirectory: base, - structuredQuestionsAvailable, - }); -} /** * Build a prompt for the research-project-parallel unit type (deep mode). * Orchestrator that spawns parallel subagents covering stack, features, diff --git a/src/resources/extensions/sf/auto-timeout-recovery.js b/src/resources/extensions/sf/auto-timeout-recovery.js index eed57b928..d935b5267 100644 --- a/src/resources/extensions/sf/auto-timeout-recovery.js +++ b/src/resources/extensions/sf/auto-timeout-recovery.js @@ -3,8 +3,10 @@ * Handles idle and hard timeout recovery with escalation, steering messages, * and blocker placeholder generation. */ + import { existsSync, readFileSync } from "node:fs"; import { relative } from "node:path"; +import { delay } from "./atomic-write.js"; import { resolveAgentEnd } from "./auto-loop.js"; import { diagnoseExpectedArtifact, @@ -116,7 +118,7 @@ export async function recoverTimedOutUnit( `Recovery attempt ${attemptNumber} for ${unitType} ${unitId}. 
Waiting ${backoffMs / 1000}s before retry.`, "info", ); - await new Promise((r) => setTimeout(r, backoffMs)); + await delay(backoffMs); } if (unitType === "execute-task") { const status = await inspectExecuteTaskDurability(basePath, unitId); diff --git a/src/resources/extensions/sf/auto-worktree.js b/src/resources/extensions/sf/auto-worktree.js index 6ec16281d..6eef0e6a9 100644 --- a/src/resources/extensions/sf/auto-worktree.js +++ b/src/resources/extensions/sf/auto-worktree.js @@ -19,7 +19,7 @@ import { statSync, unlinkSync, } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from './sf-home.js'; import { isAbsolute, join, sep as pathSep } from "node:path"; import { atomicWriteSync } from "./atomic-write.js"; import { debugLog } from "./debug-logger.js"; @@ -69,7 +69,6 @@ import { worktreePath, } from "./worktree-manager.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const PROJECT_PREFERENCES_FILE = "preferences.yaml"; // ─── Shared Constants & Helpers ───────────────────────────────────────────── /** @@ -402,7 +401,7 @@ export function syncStateToProjectRoot( * doesn't falsely trigger staleness (#804). */ export function readResourceVersion() { - const agentDir = process.env.SF_CODING_AGENT_DIR || join(sfHome, "agent"); + const agentDir = process.env.SF_CODING_AGENT_DIR || join(sfHome(), "agent"); const manifestPath = join(agentDir, "managed-resources.json"); try { const manifest = JSON.parse(readFileSync(manifestPath, "utf-8")); @@ -457,7 +456,7 @@ export function escapeStaleWorktree(base) { // when .sf is a symlink into ~/.sf/projects/ and process.cwd() // resolved through the symlink. Returning ~ would be catastrophic (#1676). const candidateSf = join(projectRoot, ".sf").replaceAll("\\", "/"); - const sfHomePath = sfHome.replaceAll("\\", "/"); + const sfHomePath = sfHome().replaceAll("\\", "/"); if (candidateSf === sfHomePath || candidateSf.startsWith(sfHomePath + "/")) { // Don't chdir to home — return base unchanged. 
// resolveProjectRoot() in worktree.ts has the full git-file-based recovery diff --git a/src/resources/extensions/sf/auto/loop.js b/src/resources/extensions/sf/auto/loop.js index 9ade23f77..bc3c0389e 100644 --- a/src/resources/extensions/sf/auto/loop.js +++ b/src/resources/extensions/sf/auto/loop.js @@ -9,7 +9,7 @@ import { randomUUID } from "node:crypto"; import { mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs"; import { join } from "node:path"; -import { atomicWriteSync } from "../atomic-write.js"; +import { atomicWriteSync, delay } from "../atomic-write.js"; import { ModelPolicyDispatchBlockedError } from "../auto-model-selection.js"; import { runAutomaticAutonomousSolverEval } from "../autonomous-solver-eval.js"; import { debugLog } from "../debug-logger.js"; @@ -358,7 +358,7 @@ async function enforceMinRequestInterval(s, prefs) { if (elapsed < minInterval) { const waitMs = minInterval - elapsed; debugLog("autoLoop", { phase: "rate-limit-wait", waitMs }); - await new Promise((r) => setTimeout(r, waitMs)); + await delay(waitMs); } } } @@ -1264,7 +1264,7 @@ export async function autoLoop(ctx, pi, s, deps) { dedupe_key: "autonomous-credential-cooldown-wait", }, ); - await new Promise((resolve) => setTimeout(resolve, waitMs)); + await delay(waitMs); finishTurn("retry", "timeout", msg); continue; // Retry iteration without incrementing consecutiveErrors } diff --git a/src/resources/extensions/sf/bootstrap/crash-log.js b/src/resources/extensions/sf/bootstrap/crash-log.js index ad30b6030..d0a66bc59 100644 --- a/src/resources/extensions/sf/bootstrap/crash-log.js +++ b/src/resources/extensions/sf/bootstrap/crash-log.js @@ -6,18 +6,15 @@ * without pulling in the full extension dependency tree. */ import { appendFileSync, mkdirSync } from "node:fs"; -import { homedir } from "node:os"; import { join } from "node:path"; +import { sfHome } from "../sf-home.js"; /** * Write a crash log to ~/.sf/crash/.log (or $SF_HOME/crash/). 
* Never throws — must be safe to call from any error handler. */ export function writeCrashLog(err, source) { try { - const crashDir = join( - process.env.SF_HOME ?? join(homedir(), ".sf"), - "crash", - ); + const crashDir = join(sfHome(), "crash"); mkdirSync(crashDir, { recursive: true }); const ts = new Date().toISOString().replace(/[:.]/g, "-"); const logPath = join(crashDir, `${ts}.log`); diff --git a/src/resources/extensions/sf/bootstrap/system-context.js b/src/resources/extensions/sf/bootstrap/system-context.js index 8aeb925ae..41e626de8 100644 --- a/src/resources/extensions/sf/bootstrap/system-context.js +++ b/src/resources/extensions/sf/bootstrap/system-context.js @@ -1,5 +1,5 @@ import { existsSync, readFileSync, statSync, unlinkSync } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from '../sf-home.js'; import { join } from "node:path"; import { markCmuxPromptShown, @@ -56,13 +56,13 @@ import { hasSkillSnapshot, } from "../skill-discovery.js"; import { deriveState } from "../state.js"; +import { extractMarkdownSection } from "../workflow-helpers.js"; import { logWarning } from "../workflow-logger.js"; import { getActiveWorktreeName, getWorktreeOriginalCwd, } from "../worktree-command.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const _fileReadCache = new Map(); /** * Read a file with mtime-based caching. 
Returns the cached content if the @@ -123,7 +123,7 @@ function buildBundledSkillsTable() { } function warnDeprecatedAgentInstructions() { const paths = [ - join(sfHome, "agent-instructions.md"), + join(sfHome(), "agent-instructions.md"), join(process.cwd(), ".sf", "agent-instructions.md"), ]; for (const path of paths) { @@ -209,7 +209,7 @@ export async function buildBeforeAgentStartResult(event, ctx) { } } const { block: knowledgeBlock, globalSizeKb } = loadKnowledgeBlock( - sfHome, + sfHome(), process.cwd(), ); const architectureBlock = loadArchitectureBlock(process.cwd()); @@ -842,27 +842,6 @@ function extractSliceExecutionExcerpt(content, relPath) { ); return parts.join("\n"); } -/** - * Extract a markdown section by heading name from content. - * Returns section content until next heading or null if not found. - */ -function extractMarkdownSection(content, heading) { - const match = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m").exec( - content, - ); - if (!match) return null; - const start = match.index + match[0].length; - const rest = content.slice(start); - const nextHeading = rest.match(/^##\s+/m); - const end = nextHeading?.index ?? rest.length; - return rest.slice(0, end).trim(); -} -/** - * Escape special regex characters in a string. - */ -function escapeRegExp(value) { - return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); -} /** * Collapse multiple whitespace in text to single spaces. 
*/ diff --git a/src/resources/extensions/sf/commands-extensions.js b/src/resources/extensions/sf/commands-extensions.js index 1a6992485..8880a0233 100644 --- a/src/resources/extensions/sf/commands-extensions.js +++ b/src/resources/extensions/sf/commands-extensions.js @@ -13,22 +13,21 @@ import { renameSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from './sf-home.js'; import { dirname, join } from "node:path"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); // ─── Registry I/O ─────────────────────────────────────────────────────────── /** * Get the path to the extension registry file. */ function getRegistryPath() { - return join(sfHome, "extensions", "registry.json"); + return join(sfHome(), "extensions", "registry.json"); } /** * Get the path to the agent extensions directory. */ function getAgentExtensionsDir() { - return join(sfHome, "agent", "extensions"); + return join(sfHome(), "agent", "extensions"); } /** * Load the extension registry, defaulting to an empty registry on error. 
diff --git a/src/resources/extensions/sf/commands-plan.js b/src/resources/extensions/sf/commands-plan.js index e487933cf..fa07d351d 100644 --- a/src/resources/extensions/sf/commands-plan.js +++ b/src/resources/extensions/sf/commands-plan.js @@ -19,9 +19,7 @@ import { statSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; -import { - basename, +import { basename, dirname, extname, isAbsolute, @@ -31,18 +29,15 @@ import { } from "node:path"; import { projectRoot } from "./commands/context.js"; import { repoIdentity } from "./repo-identity.js"; +import { sfHome } from "./sf-home.js"; import { PROMOTED_SPEC_PROJECTIONS } from "./spec-projections.js"; -function getSfHome() { - return process.env.SF_HOME || join(homedir(), ".sf"); -} - // ─── Shared helpers ───────────────────────────────────────────────────────── function resolveExternalSfRoot() { const root = projectRoot(); const id = repoIdentity(root); - return join(getSfHome(), "projects", id); + return join(sfHome(), "projects", id); } function resolveSourcePath(source) { diff --git a/src/resources/extensions/sf/commands/catalog.js b/src/resources/extensions/sf/commands/catalog.js index b281c1de4..952981ba3 100644 --- a/src/resources/extensions/sf/commands/catalog.js +++ b/src/resources/extensions/sf/commands/catalog.js @@ -1,5 +1,5 @@ import { existsSync, readdirSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from '../sf-home.js'; import { join } from "node:path"; import { loadRegistry, @@ -7,7 +7,6 @@ import { } from "../workflow-templates.js"; import { resolveProjectRoot } from "../worktree.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); /** * Comprehensive description of all available SF commands for help text. 
*/ @@ -613,7 +612,7 @@ function filterOptions(partial, options, prefix = "") { } function getExtensionCompletions(prefix, action) { try { - const extDir = join(sfHome, "agent", "extensions"); + const extDir = join(sfHome(), "agent", "extensions"); const ids = []; for (const entry of readdirSync(extDir, { withFileTypes: true })) { if (!entry.isDirectory()) continue; diff --git a/src/resources/extensions/sf/detection.js b/src/resources/extensions/sf/detection.js index 41a55d1c0..477f90fb3 100644 --- a/src/resources/extensions/sf/detection.js +++ b/src/resources/extensions/sf/detection.js @@ -15,10 +15,10 @@ import { statSync, } from "node:fs"; import { homedir } from "node:os"; +import { sfHome } from './sf-home.js'; import { join } from "node:path"; import { sfRoot } from "./paths.js"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); // ─── Project File Markers ─────────────────────────────────────────────────────── export const PROJECT_FILES = [ "package.json", @@ -859,20 +859,20 @@ function isMakeTestTargetSafe(basePath) { * Check if global SF setup exists (has ~/.sf/ with preferences). */ export function hasGlobalSetup() { - return existsSync(join(sfHome, "preferences.yaml")); + return existsSync(join(sfHome(), "preferences.yaml")); } /** * Check if this is the very first time SF has been used on this machine. * Returns true if ~/.sf/ doesn't exist or has no preferences or auth. 
*/ export function isFirstEverLaunch() { - if (!existsSync(sfHome)) return true; + if (!existsSync(sfHome())) return true; // If we have preferences, not first launch - if (existsSync(join(sfHome, "preferences.yaml"))) { + if (existsSync(join(sfHome(), "preferences.yaml"))) { return false; } // If we have auth.json, not first launch (onboarding.ts already ran) - if (existsSync(join(sfHome, "agent", "auth.json"))) return false; + if (existsSync(join(sfHome(), "agent", "auth.json"))) return false; // Check legacy path too const legacyPath = join(homedir(), ".pi", "agent", "sf-preferences.md"); if (existsSync(legacyPath)) return false; diff --git a/src/resources/extensions/sf/forensics.js b/src/resources/extensions/sf/forensics.js index f90b94a13..257ab7075 100644 --- a/src/resources/extensions/sf/forensics.js +++ b/src/resources/extensions/sf/forensics.js @@ -15,11 +15,11 @@ import { statSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; import { join, relative } from "node:path"; import { formatDuration } from "@singularity-forge/coding-agent"; import { showNextAction } from "../shared/tui.js"; import { atomicWriteSync } from "./atomic-write.js"; +import { sfHome } from "./sf-home.js"; import { isAutoActive } from "./auto.js"; import { verifyExpectedArtifact } from "./auto-recovery.js"; import { getAutoWorktreePath } from "./auto-worktree.js"; @@ -195,8 +195,7 @@ export async function handleForensics(args, ctx, pi) { // when import.meta.url resolves to the npm-global install path (Windows). 
let sfSourceDir = import.meta.dirname; if (!existsSync(join(sfSourceDir, "prompts"))) { - const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - const fallback = join(sfHome, "agent", "extensions", "sf"); + const fallback = join(sfHome(), "agent", "extensions", "sf"); if (existsSync(join(fallback, "prompts"))) sfSourceDir = fallback; } const forensicData = formatReportForPrompt(report); diff --git a/src/resources/extensions/sf/memory-extractor.js b/src/resources/extensions/sf/memory-extractor.js index 92e884e03..7db81f25f 100644 --- a/src/resources/extensions/sf/memory-extractor.js +++ b/src/resources/extensions/sf/memory-extractor.js @@ -3,7 +3,9 @@ // After each unit completes, extracts durable knowledge from the session // transcript and stores it as memory entries. One extraction at a time // (mutex guard). Fire-and-forget — never blocks autonomous mode. + import { readFileSync, statSync } from "node:fs"; +import { delay } from "./atomic-write.js"; import { applyMemoryActions, decayStaleMemories, @@ -319,7 +321,7 @@ export async function extractMemoriesFromUnit( // Retry once after a brief delay if (userPrompt) { try { - await new Promise((r) => setTimeout(r, 2000)); + await delay(2000); const response2 = await llmCallFn(EXTRACTION_SYSTEM, userPrompt); const actions2 = parseMemoryResponse(response2); if (actions2.length > 0) applyMemoryActions(actions2, unitType, unitId); diff --git a/src/resources/extensions/sf/notifications/notify.js b/src/resources/extensions/sf/notifications/notify.js index 643e620e5..1d7eb54e8 100644 --- a/src/resources/extensions/sf/notifications/notify.js +++ b/src/resources/extensions/sf/notifications/notify.js @@ -21,6 +21,7 @@ import { SAY_MESSAGES, speakMessage, } from "../../shared/notify.js"; +import { getSfAgentSettingsPath } from "../preferences.js"; const DEFAULT_CONFIG = { thresholdMs: 2000, @@ -39,7 +40,7 @@ const NotificationAction = { // Settings Loader // 
───────────────────────────────────────────────────────────────────────────── async function readSettingsFile() { - const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json"); + const sfPath = getSfAgentSettingsPath(); const piPath = path.join(os.homedir(), ".pi", "agent", "settings.json"); for (const p of [sfPath, piPath]) { try { @@ -94,7 +95,7 @@ function extractOptionText(action, iconPrefix) { } async function saveGlobalSettings(_ctx, updates) { try { - const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json"); + const sfPath = getSfAgentSettingsPath(); let fileSettings = {}; try { const content = await fs.readFile(sfPath, "utf8"); diff --git a/src/resources/extensions/sf/onboarding-state.js b/src/resources/extensions/sf/onboarding-state.js index 95008f24e..1dfd2549b 100644 --- a/src/resources/extensions/sf/onboarding-state.js +++ b/src/resources/extensions/sf/onboarding-state.js @@ -11,9 +11,9 @@ import { unlinkSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; import { dirname, join } from "node:path"; import { logWarning } from "./workflow-logger.js"; +import { sfHome } from "./sf-home.js"; /** * Bump `FLOW_VERSION` whenever a new required step is added to ONBOARDING_STEPS. * Records with an older flowVersion are treated as "needs partial re-onboarding" @@ -26,8 +26,7 @@ const RECORD_VERSION = 1; // resources tsconfig; importing from src/ pulls files outside src/resources // and breaks the build. 
const AGENT_DIR = - process.env.SF_CODING_AGENT_DIR || - join(process.env.SF_HOME || join(homedir(), ".sf"), "agent"); + process.env.SF_CODING_AGENT_DIR || join(sfHome(), "agent"); const FILE = join(AGENT_DIR, "onboarding.json"); const DEFAULT = { version: RECORD_VERSION, diff --git a/src/resources/extensions/sf/parallel-orchestrator.js b/src/resources/extensions/sf/parallel-orchestrator.js index 8595a2278..d59600042 100644 --- a/src/resources/extensions/sf/parallel-orchestrator.js +++ b/src/resources/extensions/sf/parallel-orchestrator.js @@ -17,6 +17,7 @@ import { writeFileSync, } from "node:fs"; import { join } from "node:path"; +import { delay } from "./atomic-write.js"; import { autoWorktreeBranch, runWorktreePostCreateHook, @@ -257,7 +258,7 @@ async function waitForWorkerExit(worker, timeoutMs) { const startedAt = Date.now(); while (Date.now() - startedAt < timeoutMs) { if (!isPidAlive(worker.pid)) return true; - await new Promise((resolve) => setTimeout(resolve, 50)); + await delay(50); } return !isPidAlive(worker.pid); } diff --git a/src/resources/extensions/sf/paths.js b/src/resources/extensions/sf/paths.js index 337fb427e..226e2fb87 100644 --- a/src/resources/extensions/sf/paths.js +++ b/src/resources/extensions/sf/paths.js @@ -16,10 +16,10 @@ import { readFileSync, realpathSync, } from "node:fs"; -import { homedir } from "node:os"; import { dirname, join, normalize } from "node:path"; import { DIR_CACHE_MAX } from "./constants.js"; import { nativeScanSfTree } from "./native-parser-bridge.js"; +import { sfHome } from "./sf-home.js"; // ─── Directory Listing Cache ────────────────────────────────────────────────── const dirEntryCache = new Map(); @@ -314,7 +314,6 @@ export function sfRoot(basePath) { } export const projectRoot = sfRoot; // ─── Self-Detection & Runtime Root ─────────────────────────────────────────── -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); let _isRunningOnSelfCache = null; /** * Detect whether SF is running on its own 
source tree. When true, runtime @@ -373,7 +372,7 @@ export function _resetSelfDetectionCache() { * — they are durable project memory per ADR-001 and remain in the repo. */ export function sfRuntimeRoot(basePath) { - if (isRunningOnSelf(basePath)) return sfHome; + if (isRunningOnSelf(basePath)) return sfHome(); return sfRoot(basePath); } /** diff --git a/src/resources/extensions/sf/permissions/permission-core.js b/src/resources/extensions/sf/permissions/permission-core.js index 9e34e0c40..57434fe85 100644 --- a/src/resources/extensions/sf/permissions/permission-core.js +++ b/src/resources/extensions/sf/permissions/permission-core.js @@ -10,6 +10,7 @@ import * as fs from "node:fs"; import * as path from "node:path"; import { parse } from "shell-quote"; +import { getSfAgentSettingsPath } from "../preferences.js"; export const LEVELS = ["minimal", "low", "medium", "high", "bypassed"]; export const PERMISSION_MODES = ["ask", "block"]; export const LEVEL_INDEX = { @@ -226,7 +227,7 @@ function checkOverrides(command, overrides) { // SETTINGS PERSISTENCE // ============================================================================ function getSfSettingsPath() { - return path.join(process.env.HOME || "", ".sf", "agent", "settings.json"); + return getSfAgentSettingsPath(); } function getPiSettingsPath() { return path.join(process.env.HOME || "", ".pi", "agent", "settings.json"); diff --git a/src/resources/extensions/sf/preferences.js b/src/resources/extensions/sf/preferences.js index 6a0c471aa..8822f6c19 100644 --- a/src/resources/extensions/sf/preferences.js +++ b/src/resources/extensions/sf/preferences.js @@ -73,6 +73,19 @@ export { function sfHome() { return process.env.SF_HOME || join(homedir(), ".sf"); } + +/** + * Return the canonical path to the SF agent settings file (~/.sf/agent/settings.json). 
+ * + * Purpose: provide a single source of truth for the settings path so that + * notifications, permissions, and other modules don't each hard-code the same + * homedir join. + * + * Consumer: notifications/notify.js, permissions/permission-core.js. + */ +export function getSfAgentSettingsPath() { + return join(sfHome(), "agent", "settings.json"); +} // Canonical location — pure YAML, no frontmatter markers function globalPreferencesYamlPath() { return join(sfHome(), "preferences.yaml"); diff --git a/src/resources/extensions/sf/prompt-loader.js b/src/resources/extensions/sf/prompt-loader.js index 1e77b1b97..695b7f0ef 100644 --- a/src/resources/extensions/sf/prompt-loader.js +++ b/src/resources/extensions/sf/prompt-loader.js @@ -17,9 +17,9 @@ * that aren't read until the end of a long autonomous mode run. */ import { existsSync, readdirSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; import { join } from "node:path"; import { SF_PARSE_ERROR, SFError } from "./errors.js"; +import { sfHome } from "./sf-home.js"; import { logWarning } from "./workflow-logger.js"; /** @@ -36,8 +36,7 @@ function resolveExtensionDir() { const moduleDir = import.meta.dirname; if (existsSync(join(moduleDir, "prompts"))) return moduleDir; // Fallback: user-local agent directory - const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - const agentSfDir = join(sfHome, "agent", "extensions", "sf"); + const agentSfDir = join(sfHome(), "agent", "extensions", "sf"); if (existsSync(join(agentSfDir, "prompts"))) return agentSfDir; // Last resort: return the module dir (warmCache will silently handle the miss) return moduleDir; diff --git a/src/resources/extensions/sf/prompt-validation.js b/src/resources/extensions/sf/prompt-validation.js index 399a11072..70d5cd57d 100644 --- a/src/resources/extensions/sf/prompt-validation.js +++ b/src/resources/extensions/sf/prompt-validation.js @@ -9,6 +9,7 @@ * markdown file cannot silently drop a quality gate. 
*/ import { getGatesForTurn } from "./gate-registry.js"; +import { escapeRegExp } from "./workflow-helpers.js"; /** * Validate that enhanced context content has all required sections. * @@ -83,12 +84,6 @@ export function validateEnhancedContext(content) { // heading for every gate owned by that turn. The registry is the source // of truth for which sections must exist; adding a new gate automatically // flows through via `getGatesForTurn(turn)`. -/** - * Escape a string so it can be embedded safely inside a regular expression. - */ -function escapeRegExp(value) { - return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); -} /** * Validate that an artifact contains an `## H2` heading for every gate the * named turn owns. Returns the list of missing gate section headers. diff --git a/src/resources/extensions/sf/repo-identity.js b/src/resources/extensions/sf/repo-identity.js index 4fbb61cb5..24b89ecac 100644 --- a/src/resources/extensions/sf/repo-identity.js +++ b/src/resources/extensions/sf/repo-identity.js @@ -21,10 +21,8 @@ import { unlinkSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; import { basename, dirname, join, resolve } from "node:path"; - -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); +import { sfHome } from "./sf-home.js"; function isRepoMeta(value) { if (!value || typeof value !== "object") return false; const v = value; @@ -159,7 +157,7 @@ function isProjectSf(sfPath) { // Recompute sfHome dynamically so env overrides (SF_HOME) are // picked up at call time, not just at module load time. if (stat.isDirectory()) { - const currentSfHome = process.env.SF_HOME || join(homedir(), ".sf"); + const currentSfHome = sfHome(); const normalizedSfPath = canonicalizeExistingPath(sfPath); const normalizedSfHome = canonicalizeExistingPath(currentSfHome); if (normalizedSfPath === normalizedSfHome) return false; @@ -297,7 +295,7 @@ export function repoIdentity(basePath) { * otherwise `~/.sf/projects/`. 
*/ export function externalSfRoot(basePath) { - const base = process.env.SF_STATE_DIR || sfHome; + const base = process.env.SF_STATE_DIR || sfHome(); return join(base, "projects", repoIdentity(basePath)); } /** @@ -305,7 +303,7 @@ export function externalSfRoot(basePath) { * Honors SF_STATE_DIR override before falling back to SF_HOME. */ export function externalProjectsRoot() { - const base = process.env.SF_STATE_DIR || sfHome; + const base = process.env.SF_STATE_DIR || sfHome(); return join(base, "projects"); } // ─── Numbered Variant Cleanup ──────────────────────────────────────────────── @@ -421,7 +419,7 @@ function resolveExternalPathWithRecovery(projectPath) { const markerId = readSfIdMarker(projectPath); if (markerId && markerId !== computedId) { // The marker points to a different identity — the repo was likely moved. - const base = process.env.SF_STATE_DIR || sfHome; + const base = process.env.SF_STATE_DIR || sfHome(); const markerPath = join(base, "projects", markerId); if (hasProjectState(markerPath)) { // Recover: use the old state directory and update the marker to the new identity. @@ -490,7 +488,7 @@ function ensureSfSymlinkCore(projectPath) { // not a project .sf. This can happen if resolveProjectRoot() or // escapeStaleWorktree() returned ~ as the project root (#1676). const localSfNormalized = localSf.replaceAll("\\", "/"); - const sfHomePath = sfHome.replaceAll("\\", "/"); + const sfHomePath = sfHome().replaceAll("\\", "/"); if (localSfNormalized === sfHomePath) { return localSf; } diff --git a/src/resources/extensions/sf/schedule/schedule-store.js b/src/resources/extensions/sf/schedule/schedule-store.js index 8b5a4ac50..e960fa245 100644 --- a/src/resources/extensions/sf/schedule/schedule-store.js +++ b/src/resources/extensions/sf/schedule/schedule-store.js @@ -7,8 +7,8 @@ * Consumer: schedule CLI commands (S02), autonomous dispatch reminders, and UI overlays. 
*/ import { existsSync, mkdirSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; import { join } from "node:path"; +import { sfHome } from "../sf-home.js"; import { sfRoot } from "../paths.js"; import { countScheduleEntries, @@ -22,9 +22,6 @@ import { const FILENAME = "schedule.jsonl"; const SCHEDULE_SCHEMA_VERSION = 1; -/** @type {string} */ -const _sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - // ─── Public API ───────────────────────────────────────────────────────────── /** @@ -77,7 +74,7 @@ export function resolveSchedulePath(basePath, scope) { */ function _resolvePath(basePath, scope) { if (scope === "global") { - return join(_sfHome, FILENAME); + return join(sfHome(), FILENAME); } return join(sfRoot(basePath), FILENAME); } @@ -158,7 +155,7 @@ function importLegacyScheduleFile(basePath, scope) { } function scheduleDbDir(basePath, scope) { - if (scope === "global") return _sfHome; + if (scope === "global") return sfHome(); return sfRoot(basePath); } diff --git a/src/resources/extensions/sf/self-feedback.js b/src/resources/extensions/sf/self-feedback.js index 62f8496f2..30f70a683 100644 --- a/src/resources/extensions/sf/self-feedback.js +++ b/src/resources/extensions/sf/self-feedback.js @@ -36,8 +36,8 @@ import { renameSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; import { dirname, join } from "node:path"; +import { sfHome } from "./sf-home.js"; import { resolveMilestoneFile, sfRuntimeRoot } from "./paths.js"; import { insertSelfFeedbackEntry, @@ -46,7 +46,6 @@ import { resolveSelfFeedbackEntry, } from "./sf-db.js"; -const SF_HOME = process.env.SF_HOME || join(homedir(), ".sf"); const SELF_FEEDBACK_HEADER = "# SF Self-Feedback\n\n" + "Anomalies caught during auto runs (by runtime detectors or via the\n" + @@ -91,8 +90,7 @@ function projectMarkdownPath(basePath) { return join(sfRuntimeRoot(basePath), "SELF-FEEDBACK.md"); } function upstreamLogPath() { - const sfHome = process.env.SF_HOME || SF_HOME; - 
return join(sfHome, "agent", "upstream-feedback.jsonl"); + return join(sfHome(), "agent", "upstream-feedback.jsonl"); } /** * Return the operator-facing destination for new self-feedback in `basePath`. diff --git a/src/resources/extensions/sf/subagent/index.js b/src/resources/extensions/sf/subagent/index.js index fd40dc28b..b3dd3171f 100644 --- a/src/resources/extensions/sf/subagent/index.js +++ b/src/resources/extensions/sf/subagent/index.js @@ -24,6 +24,7 @@ import { Container, Markdown, Spacer, Text } from "@singularity-forge/tui"; import { CmuxClient, shellEscape } from "../../cmux/index.js"; import { formatTokenCount } from "../../shared/mod.js"; import { getCurrentPhase } from "../../shared/sf-phase-state.js"; +import { delay } from "../atomic-write.js"; import { buildSiftEnv, ensureSiftRuntimeDirs, @@ -1109,7 +1110,7 @@ async function waitForFile(filePath, signal, timeoutMs = 30 * 60 * 1000) { while (Date.now() - started < timeoutMs) { if (signal?.aborted) return false; if (fs.existsSync(filePath)) return true; - await new Promise((resolve) => setTimeout(resolve, 150)); + await delay(150); } return false; } diff --git a/src/resources/extensions/sf/subagent/isolation.js b/src/resources/extensions/sf/subagent/isolation.js index 72ab0b62c..9cb9cb3a1 100644 --- a/src/resources/extensions/sf/subagent/isolation.js +++ b/src/resources/extensions/sf/subagent/isolation.js @@ -7,9 +7,9 @@ */ import { execFile as execFileCb } from "node:child_process"; import * as fs from "node:fs"; -import * as os from "node:os"; import * as path from "node:path"; import { promisify } from "node:util"; +import { sfHome } from "../sf-home.js"; const execFile = promisify(execFileCb); // ============================================================================ @@ -20,9 +20,8 @@ export function encodeCwd(cwd) { // prefixes cannot leak into the isolation path. 
return Buffer.from(cwd, "utf8").toString("base64url"); } -const sfHome = process.env.SF_HOME || path.join(os.homedir(), ".sf"); function getIsolationBaseDir(cwd, taskId) { - return path.join(sfHome, "wt", encodeCwd(cwd), taskId); + return path.join(sfHome(), "wt", encodeCwd(cwd), taskId); } // Track active isolation dirs for cleanup on exit const activeIsolations = new Set(); diff --git a/src/resources/extensions/sf/summary-helpers.js b/src/resources/extensions/sf/summary-helpers.js index 9fdebb105..fcaf99487 100644 --- a/src/resources/extensions/sf/summary-helpers.js +++ b/src/resources/extensions/sf/summary-helpers.js @@ -9,6 +9,7 @@ import { loadFile, parseSummary } from "./files.js"; import { relSlicePath, resolveTaskFiles, resolveTasksDir } from "./paths.js"; +import { extractMarkdownSection } from "./workflow-helpers.js"; /** * Extract and format a slice summary as a compact excerpt. @@ -196,22 +197,6 @@ export function isSummaryCleanForSkip(content) { } } -function escapeRegExpLocal(value) { - return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); -} - -function extractMarkdownSectionLocal(content, heading) { - const match = new RegExp(`^## ${escapeRegExpLocal(heading)}\\s*$`, "m").exec( - content, - ); - if (!match) return null; - const start = match.index + match[0].length; - const rest = content.slice(start); - const nextHeading = rest.match(/^##\s+/m); - const end = nextHeading?.index ?? rest.length; - return rest.slice(0, end).trim(); -} - /** * Extract key sections from a slice PLAN.md for use in task execution prompts. * Returns Goal, Demo, Verification, and Observability sections as a compact excerpt. 
@@ -229,8 +214,8 @@ export function extractSliceExecutionExcerpt(content, relPath) { const lines = content.split("\n"); const goalLine = lines.find((line) => line.startsWith("**Goal:**"))?.trim(); const demoLine = lines.find((line) => line.startsWith("**Demo:**"))?.trim(); - const verification = extractMarkdownSectionLocal(content, "Verification"); - const observability = extractMarkdownSectionLocal( + const verification = extractMarkdownSection(content, "Verification"); + const observability = extractMarkdownSection( content, "Observability / Diagnostics", ); diff --git a/src/resources/extensions/sf/sync-scheduler.js b/src/resources/extensions/sf/sync-scheduler.js index d614c6c99..1e61604dd 100644 --- a/src/resources/extensions/sf/sync-scheduler.js +++ b/src/resources/extensions/sf/sync-scheduler.js @@ -14,6 +14,7 @@ * - Session-end flush: before unit completes, pending syncs are flushed (best-effort) */ +import { delay } from "./atomic-write.js"; import { syncMemoryToSm } from "./sm-client.js"; /** @@ -191,7 +192,7 @@ async function trySyncWithRetry(item, attempt = 0) { // Exponential backoff: 1s, 2s, 4s const delayMs = BACKOFF_BASE_MS * 2 ** attempt; - await new Promise((resolve) => setTimeout(resolve, delayMs)); + await delay(delayMs); return trySyncWithRetry(item, attempt + 1); } diff --git a/src/resources/extensions/sf/tools/complete-slice.js b/src/resources/extensions/sf/tools/complete-slice.js index e9c29ee44..430228b99 100644 --- a/src/resources/extensions/sf/tools/complete-slice.js +++ b/src/resources/extensions/sf/tools/complete-slice.js @@ -5,9 +5,10 @@ * SUMMARY.md + UAT.md to disk, then writes the slice row to DB in a * transaction, toggles the roadmap checkbox, and invalidates caches. 
*/ -import { promises as fs, constants as fsConstants, mkdirSync } from "node:fs"; -import { dirname, join } from "node:path"; +import { mkdirSync } from "node:fs"; +import { join } from "node:path"; import { atomicWriteAsync } from "../atomic-write.js"; +import { ensureWritableParent, errorMessage } from "./tool-helpers.js"; import { clearParseCache, extractUatType } from "../files.js"; import { getGatesForTurn } from "../gate-registry.js"; import { renderRoadmapCheckboxes } from "../markdown-renderer.js"; @@ -39,14 +40,6 @@ import { logError, logWarning } from "../workflow-logger.js"; import { writeManifest } from "../workflow-manifest.js"; import { renderAllProjections } from "../workflow-projections.js"; -async function ensureWritableParent(filePath) { - const parentDir = dirname(filePath); - await fs.mkdir(parentDir, { recursive: true }); - await fs.access(parentDir, fsConstants.W_OK); -} -function errorMessage(error) { - return error instanceof Error ? error.message : String(error); -} function yamlScalar(value) { if (/^[A-Za-z0-9_.-]+$/.test(value)) return value; return JSON.stringify(value); diff --git a/src/resources/extensions/sf/tools/complete-task.js b/src/resources/extensions/sf/tools/complete-task.js index 9eb63647b..7304155a8 100644 --- a/src/resources/extensions/sf/tools/complete-task.js +++ b/src/resources/extensions/sf/tools/complete-task.js @@ -5,9 +5,10 @@ * task row to DB in a transaction, toggles the plan checkbox, and invalidates * caches. 
*/ -import { promises as fs, constants as fsConstants, mkdirSync } from "node:fs"; -import { dirname, join } from "node:path"; +import { mkdirSync } from "node:fs"; +import { join } from "node:path"; import { atomicWriteAsync } from "../atomic-write.js"; +import { ensureWritableParent, errorMessage } from "./tool-helpers.js"; import { clearParseCache } from "../files.js"; import { getGatesForTurn } from "../gate-registry.js"; import { renderPlanCheckboxes } from "../markdown-renderer.js"; @@ -159,14 +160,6 @@ function normalizeCompleteTaskParams(params) { ), }; } -async function ensureWritableParent(filePath) { - const parentDir = dirname(filePath); - await fs.mkdir(parentDir, { recursive: true }); - await fs.access(parentDir, fsConstants.W_OK); -} -function errorMessage(error) { - return error instanceof Error ? error.message : String(error); -} async function writeSummaryBeforeDb(filePath, content) { try { await ensureWritableParent(filePath); diff --git a/src/resources/extensions/sf/tools/plan-milestone.js b/src/resources/extensions/sf/tools/plan-milestone.js index 169d1b07b..a0f4e1125 100644 --- a/src/resources/extensions/sf/tools/plan-milestone.js +++ b/src/resources/extensions/sf/tools/plan-milestone.js @@ -20,10 +20,8 @@ import { normalizePlanningText, normalizePlanningTextArray, } from "../validation.js"; -import { appendEvent } from "../workflow-events.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; import { scaffoldMilestoneSlices } from "../workflow-templates.js"; function validateRiskEntries(value) { @@ -531,23 +529,14 @@ export async function handlePlanMilestone(rawParams, basePath) { } } // ── Post-mutation hook: projections, manifest, event log ─────────────── - try { - await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - 
appendEvent(basePath, { - cmd: "plan-milestone", - params: { milestoneId: params.milestoneId }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `plan-milestone post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "plan-milestone", + params: { milestoneId: params.milestoneId }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "plan-milestone" }); return { milestoneId: params.milestoneId, title: params.title, diff --git a/src/resources/extensions/sf/tools/plan-slice.js b/src/resources/extensions/sf/tools/plan-slice.js index 6ab99fd58..0ea50ea2d 100644 --- a/src/resources/extensions/sf/tools/plan-slice.js +++ b/src/resources/extensions/sf/tools/plan-slice.js @@ -19,10 +19,8 @@ import { invalidateStateCache } from "../state.js"; import { isClosedStatus } from "../status-guards.js"; import { taskFrontmatterFromRecord } from "../task-frontmatter.js"; import { isNonEmptyString, normalizePlanningText } from "../validation.js"; -import { appendEvent } from "../workflow-events.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; const PLANNING_MEETING_REQUIRED_MESSAGE = "planningMeeting must be a populated object — write at least 2-3 perspectives. 
Skipping is not allowed."; @@ -386,23 +384,14 @@ export async function handlePlanSlice(rawParams, basePath) { invalidateStateCache(); clearParseCache(); // ── Post-mutation hook: projections, manifest, event log ───────────── - try { - await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "plan-slice", - params: { milestoneId: params.milestoneId, sliceId: params.sliceId }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `plan-slice post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "plan-slice", + params: { milestoneId: params.milestoneId, sliceId: params.sliceId }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "plan-slice" }); return { milestoneId: params.milestoneId, sliceId: params.sliceId, diff --git a/src/resources/extensions/sf/tools/plan-task.js b/src/resources/extensions/sf/tools/plan-task.js index 3b728016b..136b87140 100644 --- a/src/resources/extensions/sf/tools/plan-task.js +++ b/src/resources/extensions/sf/tools/plan-task.js @@ -15,10 +15,8 @@ import { normalizePlanningText, normalizePlanningTextArray, } from "../validation.js"; -import { appendEvent } from "../workflow-events.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; function validateParams(params) { if (!isNonEmptyString(params?.milestoneId)) @@ -150,27 +148,18 @@ export async function handlePlanTask(rawParams, basePath) { invalidateStateCache(); clearParseCache(); // ── Post-mutation hook: projections, manifest, event log ───────────── - try { - await 
renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "plan-task", - params: { - milestoneId: params.milestoneId, - sliceId: params.sliceId, - taskId: params.taskId, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `plan-task post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "plan-task", + params: { + milestoneId: params.milestoneId, + sliceId: params.sliceId, + taskId: params.taskId, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "plan-task" }); return { milestoneId: params.milestoneId, sliceId: params.sliceId, diff --git a/src/resources/extensions/sf/tools/reassess-roadmap.js b/src/resources/extensions/sf/tools/reassess-roadmap.js index 086374022..07d5bf0f1 100644 --- a/src/resources/extensions/sf/tools/reassess-roadmap.js +++ b/src/resources/extensions/sf/tools/reassess-roadmap.js @@ -20,10 +20,8 @@ import { import { invalidateStateCache } from "../state.js"; import { isClosedStatus } from "../status-guards.js"; import { isNonEmptyString } from "../validation.js"; -import { appendEvent } from "../workflow-events.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; function validateParams(params) { if (!isNonEmptyString(params?.milestoneId)) @@ -280,26 +278,17 @@ export async function handleReassessRoadmap(rawParams, basePath) { invalidateStateCache(); clearParseCache(); // ── Post-mutation hook: projections, manifest, event log ───── - try { - await renderAllProjections(basePath, params.milestoneId); - 
writeManifest(basePath); - appendEvent(basePath, { - cmd: "reassess-roadmap", - params: { - milestoneId: params.milestoneId, - completedSliceId: params.completedSliceId, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `reassess-roadmap post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "reassess-roadmap", + params: { + milestoneId: params.milestoneId, + completedSliceId: params.completedSliceId, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "reassess-roadmap" }); return { milestoneId: params.milestoneId, completedSliceId: params.completedSliceId, diff --git a/src/resources/extensions/sf/tools/reopen-milestone.js b/src/resources/extensions/sf/tools/reopen-milestone.js index 0c6330601..707986317 100644 --- a/src/resources/extensions/sf/tools/reopen-milestone.js +++ b/src/resources/extensions/sf/tools/reopen-milestone.js @@ -27,17 +27,12 @@ import { } from "../sf-db.js"; import { invalidateStateCache } from "../state.js"; import { isClosedStatus } from "../status-guards.js"; -import { appendEvent } from "../workflow-events.js"; +import { isNonEmptyString } from "../validation.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; export async function handleReopenMilestone(params, basePath) { // ── Validate required fields ──────────────────────────────────────────── - if ( - !params.milestoneId || - typeof params.milestoneId !== "string" || - params.milestoneId.trim() === "" - ) { + if (!isNonEmptyString(params.milestoneId)) { return { error: "milestoneId is required and must be a 
non-empty string" }; } // ── Guards + DB writes inside a single transaction (prevents TOCTOU) ─── @@ -109,28 +104,19 @@ export async function handleReopenMilestone(params, basePath) { } clearPathCache(); // ── Post-mutation hook ─────────────────────────────────────────────────── - try { - await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "reopen-milestone", - params: { - milestoneId: params.milestoneId, - reason: params.reason ?? null, - slicesReset: slicesResetCount, - tasksReset: tasksResetCount, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `reopen-milestone post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "reopen-milestone", + params: { + milestoneId: params.milestoneId, + reason: params.reason ?? null, + slicesReset: slicesResetCount, + tasksReset: tasksResetCount, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "reopen-milestone" }); return { milestoneId: params.milestoneId, slicesReset: slicesResetCount, diff --git a/src/resources/extensions/sf/tools/reopen-slice.js b/src/resources/extensions/sf/tools/reopen-slice.js index ebdd52d8c..6b582cd47 100644 --- a/src/resources/extensions/sf/tools/reopen-slice.js +++ b/src/resources/extensions/sf/tools/reopen-slice.js @@ -22,24 +22,15 @@ import { } from "../sf-db.js"; import { invalidateStateCache } from "../state.js"; import { isClosedStatus } from "../status-guards.js"; -import { appendEvent } from "../workflow-events.js"; +import { isNonEmptyString } from "../validation.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from 
"../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; export async function handleReopenSlice(params, basePath) { // ── Validate required fields ──────────────────────────────────────────── - if ( - !params.sliceId || - typeof params.sliceId !== "string" || - params.sliceId.trim() === "" - ) { + if (!isNonEmptyString(params.sliceId)) { return { error: "sliceId is required and must be a non-empty string" }; } - if ( - !params.milestoneId || - typeof params.milestoneId !== "string" || - params.milestoneId.trim() === "" - ) { + if (!isNonEmptyString(params.milestoneId)) { return { error: "milestoneId is required and must be a non-empty string" }; } // ── Guards + DB writes inside a single transaction (prevents TOCTOU) ─── @@ -112,28 +103,19 @@ export async function handleReopenSlice(params, basePath) { } clearPathCache(); // ── Post-mutation hook ─────────────────────────────────────────────────── - try { - await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "reopen-slice", - params: { - milestoneId: params.milestoneId, - sliceId: params.sliceId, - reason: params.reason ?? null, - tasksReset: tasksResetCount, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `reopen-slice post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "reopen-slice", + params: { + milestoneId: params.milestoneId, + sliceId: params.sliceId, + reason: params.reason ?? 
null, + tasksReset: tasksResetCount, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "reopen-slice" }); return { milestoneId: params.milestoneId, sliceId: params.sliceId, diff --git a/src/resources/extensions/sf/tools/reopen-task.js b/src/resources/extensions/sf/tools/reopen-task.js index 5137ed56e..96d9884a1 100644 --- a/src/resources/extensions/sf/tools/reopen-task.js +++ b/src/resources/extensions/sf/tools/reopen-task.js @@ -20,31 +20,18 @@ import { } from "../sf-db.js"; import { invalidateStateCache } from "../state.js"; import { isClosedStatus } from "../status-guards.js"; -import { appendEvent } from "../workflow-events.js"; +import { isNonEmptyString } from "../validation.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; export async function handleReopenTask(params, basePath) { // ── Validate required fields ──────────────────────────────────────────── - if ( - !params.taskId || - typeof params.taskId !== "string" || - params.taskId.trim() === "" - ) { + if (!isNonEmptyString(params.taskId)) { return { error: "taskId is required and must be a non-empty string" }; } - if ( - !params.sliceId || - typeof params.sliceId !== "string" || - params.sliceId.trim() === "" - ) { + if (!isNonEmptyString(params.sliceId)) { return { error: "sliceId is required and must be a non-empty string" }; } - if ( - !params.milestoneId || - typeof params.milestoneId !== "string" || - params.milestoneId.trim() === "" - ) { + if (!isNonEmptyString(params.milestoneId)) { return { error: "milestoneId is required and must be a non-empty string" }; } // ── Guards + DB write inside a single transaction (prevents TOCTOU) ──── @@ -110,28 +97,19 @@ export async function handleReopenTask(params, basePath) { } 
clearPathCache(); // ── Post-mutation hook ─────────────────────────────────────────────────── - try { - await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "reopen-task", - params: { - milestoneId: params.milestoneId, - sliceId: params.sliceId, - taskId: params.taskId, - reason: params.reason ?? null, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `reopen-task post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "reopen-task", + params: { + milestoneId: params.milestoneId, + sliceId: params.sliceId, + taskId: params.taskId, + reason: params.reason ?? null, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "reopen-task" }); return { milestoneId: params.milestoneId, sliceId: params.sliceId, diff --git a/src/resources/extensions/sf/tools/replan-slice.js b/src/resources/extensions/sf/tools/replan-slice.js index a696fafd2..a8b504e98 100644 --- a/src/resources/extensions/sf/tools/replan-slice.js +++ b/src/resources/extensions/sf/tools/replan-slice.js @@ -22,10 +22,8 @@ import { normalizePlanningText, normalizePlanningTextArray, } from "../validation.js"; -import { appendEvent } from "../workflow-events.js"; import { logWarning } from "../workflow-logger.js"; -import { writeManifest } from "../workflow-manifest.js"; -import { renderAllProjections } from "../workflow-projections.js"; +import { runPostMutationHook } from "./tool-helpers.js"; function validateParams(params) { if (!isNonEmptyString(params?.milestoneId)) @@ -311,27 +309,18 @@ export async function handleReplanSlice(rawParams, basePath) { invalidateStateCache(); clearParseCache(); // ── Post-mutation hook: projections, manifest, event log ───── - try { 
- await renderAllProjections(basePath, params.milestoneId); - writeManifest(basePath); - appendEvent(basePath, { - cmd: "replan-slice", - params: { - milestoneId: params.milestoneId, - sliceId: params.sliceId, - blockerTaskId: params.blockerTaskId, - }, - ts: new Date().toISOString(), - actor: "agent", - actor_name: params.actorName, - trigger_reason: params.triggerReason, - }); - } catch (hookErr) { - logWarning( - "tool", - `replan-slice post-mutation hook warning: ${hookErr.message}`, - ); - } + await runPostMutationHook(basePath, params.milestoneId, { + cmd: "replan-slice", + params: { + milestoneId: params.milestoneId, + sliceId: params.sliceId, + blockerTaskId: params.blockerTaskId, + }, + ts: new Date().toISOString(), + actor: "agent", + actor_name: params.actorName, + trigger_reason: params.triggerReason, + }, { caller: "replan-slice" }); return { milestoneId: params.milestoneId, sliceId: params.sliceId, diff --git a/src/resources/extensions/sf/tools/tool-helpers.js b/src/resources/extensions/sf/tools/tool-helpers.js new file mode 100644 index 000000000..2ae45a7aa --- /dev/null +++ b/src/resources/extensions/sf/tools/tool-helpers.js @@ -0,0 +1,71 @@ +/** + * tool-helpers.js — Shared low-level utilities for SF tool handlers. + * + * Purpose: consolidate helpers that appear in multiple tool handler files + * (complete-task.js, complete-slice.js, etc.) to avoid silent divergence. + * + * Consumer: complete-task.js, complete-slice.js, and any future tool that + * writes summary files or formats error messages. + */ +import { promises as fs, constants as fsConstants } from "node:fs"; +import { dirname } from "node:path"; +import { appendEvent } from "../workflow-events.js"; +import { logWarning } from "../workflow-logger.js"; +import { writeManifest } from "../workflow-manifest.js"; +import { renderAllProjections } from "../workflow-projections.js"; + +/** + * Ensure the parent directory of filePath exists and is writable. 
+ * Creates missing directories with recursive mkdir before checking access. + * + * Purpose: guard summary-file writes so the error message names the directory + * rather than producing a cryptic ENOENT or EACCES on the file itself. + * + * Consumer: complete-task.js, complete-slice.js writeSummaryBeforeDb helpers. + */ +export async function ensureWritableParent(filePath) { + const parentDir = dirname(filePath); + await fs.mkdir(parentDir, { recursive: true }); + await fs.access(parentDir, fsConstants.W_OK); +} + +/** + * Extract a human-readable message from an unknown caught value. + * + * Purpose: produce a string regardless of whether the thrown value is an + * Error instance or a plain string/object, so callers don't need a ternary. + * + * Consumer: complete-task.js, complete-slice.js error formatting. + */ +export function errorMessage(error) { + return error instanceof Error ? error.message : String(error); +} + +/** + * Run the standard post-mutation hook: project all projections, write the + * manifest, and append an event to the workflow log. + * + * Purpose: the three-step sequence (renderAllProjections + writeManifest + + * appendEvent) appears in every tool that mutates planning state. Extracting + * it here prevents the steps from drifting independently across tool files. + * + * Consumer: plan-task.js, plan-slice.js, plan-milestone.js, replan-slice.js, + * reassess-roadmap.js, reopen-slice.js, reopen-task.js, reopen-milestone.js. + * + * @param {string} basePath - Project base path. + * @param {string} milestoneId - Milestone ID for projection scoping. + * @param {object} event - Full event object passed to appendEvent. + * @param {{ caller: string }} opts - caller is the tool name used in the warning message. 
+ */ +export async function runPostMutationHook(basePath, milestoneId, event, { caller }) { + try { + await renderAllProjections(basePath, milestoneId); + writeManifest(basePath); + appendEvent(basePath, event); + } catch (hookErr) { + logWarning( + "tool", + `${caller} post-mutation hook warning: ${hookErr.message}`, + ); + } +} diff --git a/src/resources/extensions/sf/uok/auto-dispatch.js b/src/resources/extensions/sf/uok/auto-dispatch.js index bf7e8cf31..d6b206f2b 100644 --- a/src/resources/extensions/sf/uok/auto-dispatch.js +++ b/src/resources/extensions/sf/uok/auto-dispatch.js @@ -10,7 +10,7 @@ */ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; -import { join } from "node:path"; +import { dirname, join } from "node:path"; import { buildChallengePrompt, buildCompleteMilestonePrompt, @@ -264,33 +264,50 @@ function findMissingSummaries(basePath, mid) { } // ─── Rewrite Circuit Breaker ────────────────────────────────────────────── const MAX_REWRITE_ATTEMPTS = 3; +// ─── DB-with-disk-fallback counter factory ─────────────────────────────── +// Counters must survive session restarts (crash recovery, pause/resume). +// The factory returns { get, set, increment } backed by SQLite when available +// and a JSON file on disk otherwise. +function makeDiskCounter(dbKey, diskPath) { + return { + get() { + if (isDbAvailable()) return getRuntimeCounter(dbKey); + try { + const data = JSON.parse(readFileSync(diskPath, "utf-8")); + return typeof data.count === "number" ? 
data.count : 0; + } catch { + return 0; + } + }, + set(count) { + if (isDbAvailable()) { + setRuntimeCounter(dbKey, count); + return; + } + mkdirSync(dirname(diskPath), { recursive: true }); + writeFileSync( + diskPath, + JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n", + ); + }, + increment() { + if (isDbAvailable()) return incrementRuntimeCounter(dbKey); + const count = this.get() + 1; + this.set(count); + return count; + }, + }; +} // ─── Disk-persisted rewrite attempt counter ────────────────────────────────── -// The counter must survive session restarts (crash recovery, pause/resume, -// assisted mode). Storing it on the in-memory session object caused the circuit -// breaker to never trip — see https://github.com/singularity-forge/sf-run/issues/2203 +// See https://github.com/singularity-forge/sf-run/issues/2203 function rewriteCountPath(basePath) { return join(sfRoot(basePath), "runtime", "rewrite-count.json"); } export function getRewriteCount(basePath) { - if (isDbAvailable()) return getRuntimeCounter("rewrite-count"); - try { - const data = JSON.parse(readFileSync(rewriteCountPath(basePath), "utf-8")); - return typeof data.count === "number" ? 
data.count : 0; - } catch { - return 0; - } + return makeDiskCounter("rewrite-count", rewriteCountPath(basePath)).get(); } export function setRewriteCount(basePath, count) { - if (isDbAvailable()) { - setRuntimeCounter("rewrite-count", count); - return; - } - const filePath = rewriteCountPath(basePath); - mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true }); - writeFileSync( - filePath, - JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n", - ); + makeDiskCounter("rewrite-count", rewriteCountPath(basePath)).set(count); } // ─── Run-UAT dispatch counter (per-slice) ──────────────────────────────── // Caps run-uat dispatches to prevent infinite replay when verification @@ -301,27 +318,11 @@ function uatCountPath(basePath, mid, sid) { } export function getUatCount(basePath, mid, sid) { const key = `uat-count:${mid}:${sid}`; - if (isDbAvailable()) return getRuntimeCounter(key); - try { - const data = JSON.parse( - readFileSync(uatCountPath(basePath, mid, sid), "utf-8"), - ); - return typeof data.count === "number" ? 
data.count : 0; - } catch { - return 0; - } + return makeDiskCounter(key, uatCountPath(basePath, mid, sid)).get(); } export function incrementUatCount(basePath, mid, sid) { const key = `uat-count:${mid}:${sid}`; - if (isDbAvailable()) return incrementRuntimeCounter(key); - const count = getUatCount(basePath, mid, sid) + 1; - const filePath = uatCountPath(basePath, mid, sid); - mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true }); - writeFileSync( - filePath, - JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n", - ); - return count; + return makeDiskCounter(key, uatCountPath(basePath, mid, sid)).increment(); } // ─── Helpers ───────────────────────────────────────────────────────────── /** diff --git a/src/resources/extensions/sf/uok/chaos-monkey.js b/src/resources/extensions/sf/uok/chaos-monkey.js index a3f5ad5cc..4e1142405 100644 --- a/src/resources/extensions/sf/uok/chaos-monkey.js +++ b/src/resources/extensions/sf/uok/chaos-monkey.js @@ -11,6 +11,7 @@ import { mkdtempSync, rmSync, writeFileSync } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; +import { delay } from "../atomic-write.js"; const DEFAULT_LATENCY_PROBABILITY = 0.05; const DEFAULT_PARTIAL_FAILURE_PROBABILITY = 0.03; @@ -20,10 +21,6 @@ const DEFAULT_MAX_LATENCY_MS = 5000; const DEFAULT_DISK_STRESS_MB = 50; const DEFAULT_MEMORY_STRESS_MB = 100; -function randomDelay(ms) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - function randomInRange(min, max) { return min + Math.random() * (max - min); } @@ -92,12 +89,12 @@ export class ChaosMonkey { // 1. 
Latency injection if (Math.random() < this.latencyProbability) { - const delay = Math.floor(randomInRange(100, this.maxLatencyMs)); + const latencyMs = Math.floor(randomInRange(100, this.maxLatencyMs)); console.error( - `[CHAOS MONKEY] Injecting ${delay}ms latency during phase: ${phase}`, + `[CHAOS MONKEY] Injecting ${latencyMs}ms latency during phase: ${phase}`, ); - this._injected.push({ type: "latency", phase, delay }); - await randomDelay(delay); + this._injected.push({ type: "latency", phase, delay: latencyMs }); + await delay(latencyMs); } // 2. Partial failure (non-fatal error throw) diff --git a/src/resources/extensions/sf/upstream-bridge.js b/src/resources/extensions/sf/upstream-bridge.js index 39063a4ce..ac234fcb6 100644 --- a/src/resources/extensions/sf/upstream-bridge.js +++ b/src/resources/extensions/sf/upstream-bridge.js @@ -9,9 +9,9 @@ * Never throws — any I/O failure returns 0. */ import { existsSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; import { join } from "node:path"; import { readAllSelfFeedback, recordSelfFeedback } from "./self-feedback.js"; +import { sfHome } from "./sf-home.js"; // ─── Constants ──────────────────────────────────────────────────────────────── const SEVERITY_ORDER = ["low", "medium", "high", "critical"]; @@ -21,8 +21,7 @@ const THRESHOLD_REPOS = 2; const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000; // ─── Helpers ────────────────────────────────────────────────────────────────── function getUpstreamLogPath() { - const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - return join(sfHome, "agent", "upstream-feedback.jsonl"); + return join(sfHome(), "agent", "upstream-feedback.jsonl"); } function isForgeRepo(basePath) { try { diff --git a/src/resources/extensions/sf/workflow-helpers.js b/src/resources/extensions/sf/workflow-helpers.js index 656868c14..e3acc68fd 100644 --- a/src/resources/extensions/sf/workflow-helpers.js +++ b/src/resources/extensions/sf/workflow-helpers.js @@ -16,9 +16,9 @@ 
import { isDbAvailable } from "./sf-db.js"; /** * Escape regex special characters for safe use in RegExp. - * @internal Helper + * Purpose: shared utility for markdown section extraction. */ -function escapeRegExp(value) { +export function escapeRegExp(value) { return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); } @@ -32,9 +32,9 @@ function oneLine(text) { /** * Extract a markdown section by heading. - * @internal Helper + * Purpose: shared utility for markdown section extraction. */ -function extractMarkdownSection(content, heading) { +export function extractMarkdownSection(content, heading) { const match = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m").exec( content, ); diff --git a/src/resources/extensions/sf/workflow-install.js b/src/resources/extensions/sf/workflow-install.js index 1c974387b..969f2be20 100644 --- a/src/resources/extensions/sf/workflow-install.js +++ b/src/resources/extensions/sf/workflow-install.js @@ -23,7 +23,7 @@ import { unlinkSync, writeFileSync, } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from "./sf-home.js"; import { extname, join, sep as pathSep, resolve } from "node:path"; import { parse as parseYaml } from "yaml"; import { validateDefinition } from "./definition-loader.js"; @@ -32,7 +32,6 @@ import { validateDefinition } from "./definition-loader.js"; const MAX_RESPONSE_BYTES = 256 * 1024; const FETCH_TIMEOUT_MS = 15_000; const PROVENANCE_FILE = ".installed.json"; -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); function provenancePath(dir) { return join(dir, PROVENANCE_FILE); } @@ -54,7 +53,7 @@ function writeProvenance(dir, data) { ); } export function globalInstallDir() { - return join(sfHome, "workflows"); + return join(sfHome(), "workflows"); } export function projectInstallDir(basePath) { return join(basePath, ".sf", "workflows"); } diff --git a/src/resources/extensions/sf/workflow-plugins.js b/src/resources/extensions/sf/workflow-plugins.js index c1df678b1..9c21670e7 100644 --- 
a/src/resources/extensions/sf/workflow-plugins.js +++ b/src/resources/extensions/sf/workflow-plugins.js @@ -13,19 +13,18 @@ * Precedence: project > global > bundled. Same-named file wins. */ import { existsSync, readdirSync, readFileSync, statSync } from "node:fs"; -import { homedir } from "node:os"; +import { sfHome } from "./sf-home.js"; import { basename, extname, join } from "node:path"; import { parse as parseYaml } from "yaml"; import { loadRegistry } from "./workflow-templates.js"; // ─── Path resolution ───────────────────────────────────────────────────── -const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); function resolveBundledDir() { const moduleDir = import.meta.dirname; const local = join(moduleDir, "workflow-templates"); if (existsSync(local)) return local; const agentSfDir = join( - sfHome, + sfHome(), "agent", "extensions", "sf", @@ -35,7 +34,7 @@ return local; } function globalPluginsDir() { - return join(sfHome, "workflows"); + return join(sfHome(), "workflows"); } function projectPluginsDir(basePath) { return join(basePath, ".sf", "workflows"); } diff --git a/src/resources/extensions/sf/workflow-templates.js b/src/resources/extensions/sf/workflow-templates.js index 821a1de71..d5cb6bdee 100644 --- a/src/resources/extensions/sf/workflow-templates.js +++ b/src/resources/extensions/sf/workflow-templates.js @@ -5,8 +5,8 @@ * alias, or trigger-keyword matching against user input. 
*/ import { existsSync, readFileSync } from "node:fs"; -import { homedir } from "node:os"; import { join } from "node:path"; +import { sfHome } from "./sf-home.js"; const __extensionDir = resolveSfExtensionDir(); const registryPath = join( @@ -21,8 +21,7 @@ const registryPath = join( function resolveSfExtensionDir() { const moduleDir = import.meta.dirname; if (existsSync(join(moduleDir, "workflow-templates"))) return moduleDir; - const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - const agentSfDir = join(sfHome, "agent", "extensions", "sf"); + const agentSfDir = join(sfHome(), "agent", "extensions", "sf"); if (existsSync(join(agentSfDir, "workflow-templates"))) return agentSfDir; return moduleDir; } diff --git a/src/resources/extensions/sf/worktree-root.js b/src/resources/extensions/sf/worktree-root.js index 72c75d9da..60db4a61c 100644 --- a/src/resources/extensions/sf/worktree-root.js +++ b/src/resources/extensions/sf/worktree-root.js @@ -1,10 +1,6 @@ import { existsSync, readFileSync, realpathSync, statSync } from "node:fs"; -import { homedir } from "node:os"; import { join, resolve } from "node:path"; - -function sfHome() { - return process.env.SF_HOME || join(homedir(), ".sf"); -} +import { sfHome } from "./sf-home.js"; export function normalizeWorktreePathForCompare(path) { let normalized; try { diff --git a/src/resources/extensions/sf/worktree.js b/src/resources/extensions/sf/worktree.js index a4c29a1aa..fc5206755 100644 --- a/src/resources/extensions/sf/worktree.js +++ b/src/resources/extensions/sf/worktree.js @@ -12,10 +12,10 @@ * SLICE_BRANCH_RE) remain for backwards compatibility with legacy branches. 
*/ import { existsSync, readFileSync, realpathSync, utimesSync } from "node:fs"; -import { homedir } from "node:os"; import { join, resolve } from "node:path"; import { GitService, writeIntegrationBranch } from "./git-service.js"; import { loadEffectiveSFPreferences } from "./preferences.js"; +import { sfHome } from "./sf-home.js"; import { detectWorktreeName, findWorktreeSegment } from "./worktree-detect.js"; export { MergeConflictError } from "./git-service.js"; @@ -106,11 +106,9 @@ export function resolveProjectRoot(basePath) { // Layer 2: Guard against resolving to the user's home directory. // When .sf is a symlink into ~/.sf/projects/, the resolved path // contains /.sf/ at the user-level boundary. Slicing there yields ~ — wrong. - const sfHome = normalizePathForCompare( - process.env.SF_HOME || join(homedir(), ".sf"), - ); + const sfHomePath = normalizePathForCompare(sfHome()); const candidateSfPath = normalizePathForCompare(join(candidate, ".sf")); - if (candidateSfPath === sfHome || candidateSfPath.startsWith(sfHome + "/")) { + if (candidateSfPath === sfHomePath || candidateSfPath.startsWith(sfHomePath + "/")) { // The candidate is the home directory (or within it in a way that .sf // maps to the user-level SF dir). Try to recover the real project root // from the worktree's .git file.