refactor(sf-ext): consolidate sfHome, counters, tool helpers, settings path, post-mutation hook

- rf2-01: replace 23 inline `process.env.SF_HOME || join(homedir(), '.sf')` patterns
  across 19 files with canonical `sfHome()` from sf-home.js; removes 5 private
  sfHome/getSfHome function definitions and unused os/homedir imports
- rf2-05: extract `ensureWritableParent` and `errorMessage` from complete-task.js
  and complete-slice.js into new tools/tool-helpers.js
- rf2-06: add `runPostMutationHook` to tool-helpers.js; replace 8 identical
  try/catch blocks (plan-task, plan-slice, plan-milestone, replan-slice,
  reassess-roadmap, reopen-slice, reopen-task, reopen-milestone) with single call
- rf2-09: add `makeDiskCounter` factory in auto-dispatch.js; consolidate 4 counter
  functions (rewrite/uat get/set/increment) from duplicated if/else DB-vs-disk
  logic into thin factory wrappers (~35 lines removed)
- rf2-10: export `getSfAgentSettingsPath()` from preferences.js; update
  notifications/notify.js and permissions/permission-core.js to use it

All 4375 unit tests pass.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
Mikael Hugo 2026-05-11 10:17:58 +02:00
parent 9dc244eb68
commit 0ece0e5413
49 changed files with 335 additions and 461 deletions

View file

@ -19,7 +19,7 @@ function computeRetryDelayMs(attempt) {
const jitter = randomBytes(1)[0] % 5;
return base + jitter;
}
function delay(ms) {
export function delay(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/**

View file

@ -10,7 +10,9 @@
*
* Extracted from handleAgentEnd() in auto.ts.
*/
import { detectAbandonMilestone } from "./abandon-detect.js";
import { delay } from "./atomic-write.js";
import { resolveExpectedArtifactPath as resolveArtifactForContent } from "./auto-artifact-paths.js";
import {
diagnoseExpectedArtifact,
@ -396,7 +398,7 @@ export async function postUnitPreVerification(pctx, opts) {
invalidateAllCaches();
// Small delay to let files settle (skipped for sidecars where latency matters more)
if (!opts?.skipSettleDelay) {
await new Promise((r) => setTimeout(r, 100));
await delay(100);
}
const prefs = loadEffectiveSFPreferences()?.preferences;
const uokFlags = resolveUokFlags(prefs);

View file

@ -345,7 +345,7 @@ export function buildSourceFilePaths(base, mid, sid) {
/**
* Load and inline dependency slice summaries (full content, not just paths).
*/
export async function inlineDependencySummaries(mid, sid, base, budgetChars) {
async function inlineDependencySummaries(mid, sid, base, budgetChars) {
// DB primary path — get slice depends directly
let depends = null;
try {
@ -637,7 +637,7 @@ function extractKeywords(title) {
* Queries DB memories table (primary); falls back to KNOWLEDGE.md file.
* Returns null if no knowledge exists or no entries match.
*/
export async function inlineKnowledgeScoped(base, keywords) {
async function inlineKnowledgeScoped(base, keywords) {
try {
const { isDbAvailable, getActiveMemories } = await import("./sf-db.js");
if (isDbAvailable()) {
@ -670,7 +670,7 @@ export async function inlineKnowledgeScoped(base, keywords) {
* Caps the payload at `maxChars` (default 30,000 chars).
* Returns null when no knowledge exists or no entries match any keyword.
*/
export async function inlineKnowledgeBudgeted(base, keywords, options) {
async function inlineKnowledgeBudgeted(base, keywords, options) {
const DEFAULT_MAX_CHARS = 30_000;
const HARD_MAX_CHARS = 100_000;
const raw = Number(options?.maxChars ?? DEFAULT_MAX_CHARS);
@ -717,7 +717,7 @@ export async function inlineKnowledgeBudgeted(base, keywords, options) {
* Reads full roadmap, extracts minimal excerpt with header + predecessor + target row.
* Returns null if roadmap doesn't exist or slice not found.
*/
export async function inlineRoadmapExcerpt(base, mid, sid) {
async function inlineRoadmapExcerpt(base, mid, sid) {
const roadmapPath = resolveMilestoneFile(base, mid, "ROADMAP");
if (!roadmapPath || !existsSync(roadmapPath)) return null;
const roadmapRel = relMilestoneFile(base, mid, "ROADMAP");
@ -1000,23 +1000,10 @@ export function buildSkillDiscoveryVars() {
};
}
// ─── Text Helpers ──────────────────────────────────────────────────────────
/**
 * Extract the body of a `## <heading>` markdown section from `content`.
 * Returns the trimmed text up to (not including) the next `## ` heading,
 * or null when the heading is not present.
 */
export function extractMarkdownSection(content, heading) {
  const headingRe = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m");
  const hit = headingRe.exec(content);
  if (hit === null) return null;
  const body = content.slice(hit.index + hit[0].length);
  const next = /^##\s+/m.exec(body);
  const sliceEnd = next === null ? body.length : next.index;
  return body.slice(0, sliceEnd).trim();
}
/**
 * Escape every RegExp metacharacter in `value` so the result can be
 * embedded verbatim inside a regular-expression source string.
 */
export function escapeRegExp(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
// Re-exported from workflow-helpers.js:
// - buildResumeSection, buildCarryForwardSection
// - checkNeedsReassessment, checkNeedsRunUat
// - escapeRegExp, extractMarkdownSection
// ─── Prompt Builders ──────────────────────────────────────────────────────
/**
* Build a prompt for the workflow-preferences unit type (deep mode).
@ -1077,15 +1064,6 @@ export async function buildDiscussRequirementsPrompt(
* ask_user_questions and writes .sf/runtime/research-decision.json.
* Fires after discuss-requirements and before research-project-parallel.
*/
export async function buildResearchDecisionPrompt(
  base,
  structuredQuestionsAvailable = "false",
) {
  // Same template variables the guided-research-decision prompt expects.
  const promptVars = {
    workingDirectory: base,
    structuredQuestionsAvailable,
  };
  return loadPrompt("guided-research-decision", promptVars);
}
/**
* Build a prompt for the research-project-parallel unit type (deep mode).
* Orchestrator that spawns parallel subagents covering stack, features,

View file

@ -3,8 +3,10 @@
* Handles idle and hard timeout recovery with escalation, steering messages,
* and blocker placeholder generation.
*/
import { existsSync, readFileSync } from "node:fs";
import { relative } from "node:path";
import { delay } from "./atomic-write.js";
import { resolveAgentEnd } from "./auto-loop.js";
import {
diagnoseExpectedArtifact,
@ -116,7 +118,7 @@ export async function recoverTimedOutUnit(
`Recovery attempt ${attemptNumber} for ${unitType} ${unitId}. Waiting ${backoffMs / 1000}s before retry.`,
"info",
);
await new Promise((r) => setTimeout(r, backoffMs));
await delay(backoffMs);
}
if (unitType === "execute-task") {
const status = await inspectExecuteTaskDurability(basePath, unitId);

View file

@ -19,7 +19,7 @@ import {
statSync,
unlinkSync,
} from "node:fs";
import { homedir } from "node:os";
import { sfHome } from './sf-home.js';
import { isAbsolute, join, sep as pathSep } from "node:path";
import { atomicWriteSync } from "./atomic-write.js";
import { debugLog } from "./debug-logger.js";
@ -69,7 +69,6 @@ import {
worktreePath,
} from "./worktree-manager.js";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const PROJECT_PREFERENCES_FILE = "preferences.yaml";
// ─── Shared Constants & Helpers ─────────────────────────────────────────────
/**
@ -402,7 +401,7 @@ export function syncStateToProjectRoot(
* doesn't falsely trigger staleness (#804).
*/
export function readResourceVersion() {
const agentDir = process.env.SF_CODING_AGENT_DIR || join(sfHome, "agent");
const agentDir = process.env.SF_CODING_AGENT_DIR || join(sfHome(), "agent");
const manifestPath = join(agentDir, "managed-resources.json");
try {
const manifest = JSON.parse(readFileSync(manifestPath, "utf-8"));
@ -457,7 +456,7 @@ export function escapeStaleWorktree(base) {
// when .sf is a symlink into ~/.sf/projects/<hash> and process.cwd()
// resolved through the symlink. Returning ~ would be catastrophic (#1676).
const candidateSf = join(projectRoot, ".sf").replaceAll("\\", "/");
const sfHomePath = sfHome.replaceAll("\\", "/");
const sfHomePath = sfHome().replaceAll("\\", "/");
if (candidateSf === sfHomePath || candidateSf.startsWith(sfHomePath + "/")) {
// Don't chdir to home — return base unchanged.
// resolveProjectRoot() in worktree.ts has the full git-file-based recovery

View file

@ -9,7 +9,7 @@
import { randomUUID } from "node:crypto";
import { mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { atomicWriteSync } from "../atomic-write.js";
import { atomicWriteSync, delay } from "../atomic-write.js";
import { ModelPolicyDispatchBlockedError } from "../auto-model-selection.js";
import { runAutomaticAutonomousSolverEval } from "../autonomous-solver-eval.js";
import { debugLog } from "../debug-logger.js";
@ -358,7 +358,7 @@ async function enforceMinRequestInterval(s, prefs) {
if (elapsed < minInterval) {
const waitMs = minInterval - elapsed;
debugLog("autoLoop", { phase: "rate-limit-wait", waitMs });
await new Promise((r) => setTimeout(r, waitMs));
await delay(waitMs);
}
}
}
@ -1264,7 +1264,7 @@ export async function autoLoop(ctx, pi, s, deps) {
dedupe_key: "autonomous-credential-cooldown-wait",
},
);
await new Promise((resolve) => setTimeout(resolve, waitMs));
await delay(waitMs);
finishTurn("retry", "timeout", msg);
continue; // Retry iteration without incrementing consecutiveErrors
}

View file

@ -6,18 +6,15 @@
* without pulling in the full extension dependency tree.
*/
import { appendFileSync, mkdirSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { sfHome } from "../sf-home.js";
/**
* Write a crash log to ~/.sf/crash/<timestamp>.log (or $SF_HOME/crash/).
* Never throws; must be safe to call from any error handler.
*/
export function writeCrashLog(err, source) {
try {
const crashDir = join(
process.env.SF_HOME ?? join(homedir(), ".sf"),
"crash",
);
const crashDir = join(sfHome(), "crash");
mkdirSync(crashDir, { recursive: true });
const ts = new Date().toISOString().replace(/[:.]/g, "-");
const logPath = join(crashDir, `${ts}.log`);

View file

@ -1,5 +1,5 @@
import { existsSync, readFileSync, statSync, unlinkSync } from "node:fs";
import { homedir } from "node:os";
import { sfHome } from '../sf-home.js';
import { join } from "node:path";
import {
markCmuxPromptShown,
@ -56,13 +56,13 @@ import {
hasSkillSnapshot,
} from "../skill-discovery.js";
import { deriveState } from "../state.js";
import { extractMarkdownSection } from "../workflow-helpers.js";
import { logWarning } from "../workflow-logger.js";
import {
getActiveWorktreeName,
getWorktreeOriginalCwd,
} from "../worktree-command.js";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const _fileReadCache = new Map();
/**
* Read a file with mtime-based caching. Returns the cached content if the
@ -123,7 +123,7 @@ function buildBundledSkillsTable() {
}
function warnDeprecatedAgentInstructions() {
const paths = [
join(sfHome, "agent-instructions.md"),
join(sfHome(), "agent-instructions.md"),
join(process.cwd(), ".sf", "agent-instructions.md"),
];
for (const path of paths) {
@ -209,7 +209,7 @@ export async function buildBeforeAgentStartResult(event, ctx) {
}
}
const { block: knowledgeBlock, globalSizeKb } = loadKnowledgeBlock(
sfHome,
sfHome(),
process.cwd(),
);
const architectureBlock = loadArchitectureBlock(process.cwd());
@ -842,27 +842,6 @@ function extractSliceExecutionExcerpt(content, relPath) {
);
return parts.join("\n");
}
/**
 * Extract the body of a `## <heading>` markdown section from `content`.
 * Returns the trimmed text up to (not including) the next `## ` heading,
 * or null when the heading is not found.
 */
function extractMarkdownSection(content, heading) {
  const headingRe = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m");
  const hit = headingRe.exec(content);
  if (hit === null) return null;
  const body = content.slice(hit.index + hit[0].length);
  const next = /^##\s+/m.exec(body);
  const sliceEnd = next === null ? body.length : next.index;
  return body.slice(0, sliceEnd).trim();
}
/**
 * Escape every RegExp metacharacter in `value` so the result can be
 * embedded verbatim inside a regular-expression source string.
 */
function escapeRegExp(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
* Collapse multiple whitespace in text to single spaces.
*/

View file

@ -13,22 +13,21 @@ import {
renameSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { sfHome } from './sf-home.js';
import { dirname, join } from "node:path";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
// ─── Registry I/O ───────────────────────────────────────────────────────────
/**
* Get the path to the extension registry file.
*/
function getRegistryPath() {
return join(sfHome, "extensions", "registry.json");
return join(sfHome(), "extensions", "registry.json");
}
/**
* Get the path to the agent extensions directory.
*/
function getAgentExtensionsDir() {
return join(sfHome, "agent", "extensions");
return join(sfHome(), "agent", "extensions");
}
/**
* Load the extension registry, defaulting to an empty registry on error.

View file

@ -19,9 +19,7 @@ import {
statSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import {
basename,
import { basename,
dirname,
extname,
isAbsolute,
@ -31,18 +29,15 @@ import {
} from "node:path";
import { projectRoot } from "./commands/context.js";
import { repoIdentity } from "./repo-identity.js";
import { sfHome } from "./sf-home.js";
import { PROMOTED_SPEC_PROJECTIONS } from "./spec-projections.js";
function getSfHome() {
return process.env.SF_HOME || join(homedir(), ".sf");
}
// ─── Shared helpers ─────────────────────────────────────────────────────────
function resolveExternalSfRoot() {
const root = projectRoot();
const id = repoIdentity(root);
return join(getSfHome(), "projects", id);
return join(sfHome(), "projects", id);
}
function resolveSourcePath(source) {

View file

@ -1,5 +1,5 @@
import { existsSync, readdirSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { sfHome } from '../sf-home.js';
import { join } from "node:path";
import {
loadRegistry,
@ -7,7 +7,6 @@ import {
} from "../workflow-templates.js";
import { resolveProjectRoot } from "../worktree.js";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
/**
* Comprehensive description of all available SF commands for help text.
*/
@ -613,7 +612,7 @@ function filterOptions(partial, options, prefix = "") {
}
function getExtensionCompletions(prefix, action) {
try {
const extDir = join(sfHome, "agent", "extensions");
const extDir = join(sfHome(), "agent", "extensions");
const ids = [];
for (const entry of readdirSync(extDir, { withFileTypes: true })) {
if (!entry.isDirectory()) continue;

View file

@ -15,10 +15,10 @@ import {
statSync,
} from "node:fs";
import { homedir } from "node:os";
import { sfHome } from './sf-home.js';
import { join } from "node:path";
import { sfRoot } from "./paths.js";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
// ─── Project File Markers ───────────────────────────────────────────────────────
export const PROJECT_FILES = [
"package.json",
@ -859,20 +859,20 @@ function isMakeTestTargetSafe(basePath) {
* Check if global SF setup exists (has ~/.sf/ with preferences).
*/
export function hasGlobalSetup() {
return existsSync(join(sfHome, "preferences.yaml"));
return existsSync(join(sfHome(), "preferences.yaml"));
}
/**
* Check if this is the very first time SF has been used on this machine.
* Returns true if ~/.sf/ doesn't exist or has no preferences or auth.
*/
export function isFirstEverLaunch() {
if (!existsSync(sfHome)) return true;
if (!existsSync(sfHome())) return true;
// If we have preferences, not first launch
if (existsSync(join(sfHome, "preferences.yaml"))) {
if (existsSync(join(sfHome(), "preferences.yaml"))) {
return false;
}
// If we have auth.json, not first launch (onboarding.ts already ran)
if (existsSync(join(sfHome, "agent", "auth.json"))) return false;
if (existsSync(join(sfHome(), "agent", "auth.json"))) return false;
// Check legacy path too
const legacyPath = join(homedir(), ".pi", "agent", "sf-preferences.md");
if (existsSync(legacyPath)) return false;

View file

@ -15,11 +15,11 @@ import {
statSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { join, relative } from "node:path";
import { formatDuration } from "@singularity-forge/coding-agent";
import { showNextAction } from "../shared/tui.js";
import { atomicWriteSync } from "./atomic-write.js";
import { sfHome } from "./sf-home.js";
import { isAutoActive } from "./auto.js";
import { verifyExpectedArtifact } from "./auto-recovery.js";
import { getAutoWorktreePath } from "./auto-worktree.js";
@ -195,8 +195,7 @@ export async function handleForensics(args, ctx, pi) {
// when import.meta.url resolves to the npm-global install path (Windows).
let sfSourceDir = import.meta.dirname;
if (!existsSync(join(sfSourceDir, "prompts"))) {
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const fallback = join(sfHome, "agent", "extensions", "sf");
const fallback = join(sfHome(), "agent", "extensions", "sf");
if (existsSync(join(fallback, "prompts"))) sfSourceDir = fallback;
}
const forensicData = formatReportForPrompt(report);

View file

@ -3,7 +3,9 @@
// After each unit completes, extracts durable knowledge from the session
// transcript and stores it as memory entries. One extraction at a time
// (mutex guard). Fire-and-forget — never blocks autonomous mode.
import { readFileSync, statSync } from "node:fs";
import { delay } from "./atomic-write.js";
import {
applyMemoryActions,
decayStaleMemories,
@ -319,7 +321,7 @@ export async function extractMemoriesFromUnit(
// Retry once after a brief delay
if (userPrompt) {
try {
await new Promise((r) => setTimeout(r, 2000));
await delay(2000);
const response2 = await llmCallFn(EXTRACTION_SYSTEM, userPrompt);
const actions2 = parseMemoryResponse(response2);
if (actions2.length > 0) applyMemoryActions(actions2, unitType, unitId);

View file

@ -21,6 +21,7 @@ import {
SAY_MESSAGES,
speakMessage,
} from "../../shared/notify.js";
import { getSfAgentSettingsPath } from "../preferences.js";
const DEFAULT_CONFIG = {
thresholdMs: 2000,
@ -39,7 +40,7 @@ const NotificationAction = {
// Settings Loader
// ─────────────────────────────────────────────────────────────────────────────
async function readSettingsFile() {
const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json");
const sfPath = getSfAgentSettingsPath();
const piPath = path.join(os.homedir(), ".pi", "agent", "settings.json");
for (const p of [sfPath, piPath]) {
try {
@ -94,7 +95,7 @@ function extractOptionText(action, iconPrefix) {
}
async function saveGlobalSettings(_ctx, updates) {
try {
const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json");
const sfPath = getSfAgentSettingsPath();
let fileSettings = {};
try {
const content = await fs.readFile(sfPath, "utf8");

View file

@ -11,9 +11,9 @@ import {
unlinkSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { logWarning } from "./workflow-logger.js";
import { sfHome } from "./sf-home.js";
/**
* Bump `FLOW_VERSION` whenever a new required step is added to ONBOARDING_STEPS.
* Records with an older flowVersion are treated as "needs partial re-onboarding"
@ -26,8 +26,7 @@ const RECORD_VERSION = 1;
// resources tsconfig; importing from src/ pulls files outside src/resources
// and breaks the build.
const AGENT_DIR =
process.env.SF_CODING_AGENT_DIR ||
join(process.env.SF_HOME || join(homedir(), ".sf"), "agent");
process.env.SF_CODING_AGENT_DIR || join(sfHome(), "agent");
const FILE = join(AGENT_DIR, "onboarding.json");
const DEFAULT = {
version: RECORD_VERSION,

View file

@ -17,6 +17,7 @@ import {
writeFileSync,
} from "node:fs";
import { join } from "node:path";
import { delay } from "./atomic-write.js";
import {
autoWorktreeBranch,
runWorktreePostCreateHook,
@ -257,7 +258,7 @@ async function waitForWorkerExit(worker, timeoutMs) {
const startedAt = Date.now();
while (Date.now() - startedAt < timeoutMs) {
if (!isPidAlive(worker.pid)) return true;
await new Promise((resolve) => setTimeout(resolve, 50));
await delay(50);
}
return !isPidAlive(worker.pid);
}

View file

@ -16,10 +16,10 @@ import {
readFileSync,
realpathSync,
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join, normalize } from "node:path";
import { DIR_CACHE_MAX } from "./constants.js";
import { nativeScanSfTree } from "./native-parser-bridge.js";
import { sfHome } from "./sf-home.js";
// ─── Directory Listing Cache ──────────────────────────────────────────────────
const dirEntryCache = new Map();
@ -314,7 +314,6 @@ export function sfRoot(basePath) {
}
export const projectRoot = sfRoot;
// ─── Self-Detection & Runtime Root ───────────────────────────────────────────
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
let _isRunningOnSelfCache = null;
/**
* Detect whether SF is running on its own source tree. When true, runtime
@ -373,7 +372,7 @@ export function _resetSelfDetectionCache() {
* they are durable project memory per ADR-001 and remain in the repo.
*/
export function sfRuntimeRoot(basePath) {
if (isRunningOnSelf(basePath)) return sfHome;
if (isRunningOnSelf(basePath)) return sfHome();
return sfRoot(basePath);
}
/**

View file

@ -10,6 +10,7 @@
import * as fs from "node:fs";
import * as path from "node:path";
import { parse } from "shell-quote";
import { getSfAgentSettingsPath } from "../preferences.js";
export const LEVELS = ["minimal", "low", "medium", "high", "bypassed"];
export const PERMISSION_MODES = ["ask", "block"];
export const LEVEL_INDEX = {
@ -226,7 +227,7 @@ function checkOverrides(command, overrides) {
// SETTINGS PERSISTENCE
// ============================================================================
function getSfSettingsPath() {
return path.join(process.env.HOME || "", ".sf", "agent", "settings.json");
return getSfAgentSettingsPath();
}
function getPiSettingsPath() {
return path.join(process.env.HOME || "", ".pi", "agent", "settings.json");

View file

@ -73,6 +73,19 @@ export {
/**
 * Resolve the SF home directory: $SF_HOME when set to a non-empty value,
 * otherwise ~/.sf. Read from the environment at call time so overrides
 * set after module load are honored.
 */
function sfHome() {
  const override = process.env.SF_HOME;
  if (override) return override;
  return join(homedir(), ".sf");
}
/**
 * Canonical path to the SF agent settings file
 * (~/.sf/agent/settings.json, honoring $SF_HOME).
 *
 * Single source of truth for the settings path so notifications,
 * permissions, and other modules don't each hard-code the same
 * homedir join.
 *
 * Consumers: notifications/notify.js, permissions/permission-core.js.
 */
export function getSfAgentSettingsPath() {
  const agentDir = join(sfHome(), "agent");
  return join(agentDir, "settings.json");
}
// Canonical location — pure YAML, no frontmatter markers
function globalPreferencesYamlPath() {
return join(sfHome(), "preferences.yaml");

View file

@ -17,9 +17,9 @@
* that aren't read until the end of a long autonomous mode run.
*/
import { existsSync, readdirSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { SF_PARSE_ERROR, SFError } from "./errors.js";
import { sfHome } from "./sf-home.js";
import { logWarning } from "./workflow-logger.js";
/**
@ -36,8 +36,7 @@ function resolveExtensionDir() {
const moduleDir = import.meta.dirname;
if (existsSync(join(moduleDir, "prompts"))) return moduleDir;
// Fallback: user-local agent directory
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const agentSfDir = join(sfHome, "agent", "extensions", "sf");
const agentSfDir = join(sfHome(), "agent", "extensions", "sf");
if (existsSync(join(agentSfDir, "prompts"))) return agentSfDir;
// Last resort: return the module dir (warmCache will silently handle the miss)
return moduleDir;

View file

@ -9,6 +9,7 @@
* markdown file cannot silently drop a quality gate.
*/
import { getGatesForTurn } from "./gate-registry.js";
import { escapeRegExp } from "./workflow-helpers.js";
/**
* Validate that enhanced context content has all required sections.
*
@ -83,12 +84,6 @@ export function validateEnhancedContext(content) {
// heading for every gate owned by that turn. The registry is the source
// of truth for which sections must exist; adding a new gate automatically
// flows through via `getGatesForTurn(turn)`.
/**
 * Escape every RegExp metacharacter in `value` so the result can be
 * embedded verbatim inside a regular-expression source string.
 */
function escapeRegExp(value) {
  const specials = /[.*+?^${}()|[\]\\]/g;
  return value.replace(specials, "\\$&");
}
/**
* Validate that an artifact contains an `## H2` heading for every gate the
* named turn owns. Returns the list of missing gate section headers.

View file

@ -21,10 +21,8 @@ import {
unlinkSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { basename, dirname, join, resolve } from "node:path";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
import { sfHome } from "./sf-home.js";
function isRepoMeta(value) {
if (!value || typeof value !== "object") return false;
const v = value;
@ -159,7 +157,7 @@ function isProjectSf(sfPath) {
// Recompute sfHome dynamically so env overrides (SF_HOME) are
// picked up at call time, not just at module load time.
if (stat.isDirectory()) {
const currentSfHome = process.env.SF_HOME || join(homedir(), ".sf");
const currentSfHome = sfHome();
const normalizedSfPath = canonicalizeExistingPath(sfPath);
const normalizedSfHome = canonicalizeExistingPath(currentSfHome);
if (normalizedSfPath === normalizedSfHome) return false;
@ -297,7 +295,7 @@ export function repoIdentity(basePath) {
* otherwise `~/.sf/projects/<hash>`.
*/
export function externalSfRoot(basePath) {
const base = process.env.SF_STATE_DIR || sfHome;
const base = process.env.SF_STATE_DIR || sfHome();
return join(base, "projects", repoIdentity(basePath));
}
/**
@ -305,7 +303,7 @@ export function externalSfRoot(basePath) {
* Honors SF_STATE_DIR override before falling back to SF_HOME.
*/
export function externalProjectsRoot() {
const base = process.env.SF_STATE_DIR || sfHome;
const base = process.env.SF_STATE_DIR || sfHome();
return join(base, "projects");
}
// ─── Numbered Variant Cleanup ────────────────────────────────────────────────
@ -421,7 +419,7 @@ function resolveExternalPathWithRecovery(projectPath) {
const markerId = readSfIdMarker(projectPath);
if (markerId && markerId !== computedId) {
// The marker points to a different identity — the repo was likely moved.
const base = process.env.SF_STATE_DIR || sfHome;
const base = process.env.SF_STATE_DIR || sfHome();
const markerPath = join(base, "projects", markerId);
if (hasProjectState(markerPath)) {
// Recover: use the old state directory and update the marker to the new identity.
@ -490,7 +488,7 @@ function ensureSfSymlinkCore(projectPath) {
// not a project .sf. This can happen if resolveProjectRoot() or
// escapeStaleWorktree() returned ~ as the project root (#1676).
const localSfNormalized = localSf.replaceAll("\\", "/");
const sfHomePath = sfHome.replaceAll("\\", "/");
const sfHomePath = sfHome().replaceAll("\\", "/");
if (localSfNormalized === sfHomePath) {
return localSf;
}

View file

@ -7,8 +7,8 @@
* Consumer: schedule CLI commands (S02), autonomous dispatch reminders, and UI overlays.
*/
import { existsSync, mkdirSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { sfHome } from "../sf-home.js";
import { sfRoot } from "../paths.js";
import {
countScheduleEntries,
@ -22,9 +22,6 @@ import {
const FILENAME = "schedule.jsonl";
const SCHEDULE_SCHEMA_VERSION = 1;
/** @type {string} */
const _sfHome = process.env.SF_HOME || join(homedir(), ".sf");
// ─── Public API ─────────────────────────────────────────────────────────────
/**
@ -77,7 +74,7 @@ export function resolveSchedulePath(basePath, scope) {
*/
function _resolvePath(basePath, scope) {
if (scope === "global") {
return join(_sfHome, FILENAME);
return join(sfHome(), FILENAME);
}
return join(sfRoot(basePath), FILENAME);
}
@ -158,7 +155,7 @@ function importLegacyScheduleFile(basePath, scope) {
}
function scheduleDbDir(basePath, scope) {
if (scope === "global") return _sfHome;
if (scope === "global") return sfHome();
return sfRoot(basePath);
}

View file

@ -36,8 +36,8 @@ import {
renameSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { sfHome } from "./sf-home.js";
import { resolveMilestoneFile, sfRuntimeRoot } from "./paths.js";
import {
insertSelfFeedbackEntry,
@ -46,7 +46,6 @@ import {
resolveSelfFeedbackEntry,
} from "./sf-db.js";
const SF_HOME = process.env.SF_HOME || join(homedir(), ".sf");
const SELF_FEEDBACK_HEADER =
"# SF Self-Feedback\n\n" +
"Anomalies caught during auto runs (by runtime detectors or via the\n" +
@ -91,8 +90,7 @@ function projectMarkdownPath(basePath) {
return join(sfRuntimeRoot(basePath), "SELF-FEEDBACK.md");
}
function upstreamLogPath() {
const sfHome = process.env.SF_HOME || SF_HOME;
return join(sfHome, "agent", "upstream-feedback.jsonl");
return join(sfHome(), "agent", "upstream-feedback.jsonl");
}
/**
* Return the operator-facing destination for new self-feedback in `basePath`.

View file

@ -24,6 +24,7 @@ import { Container, Markdown, Spacer, Text } from "@singularity-forge/tui";
import { CmuxClient, shellEscape } from "../../cmux/index.js";
import { formatTokenCount } from "../../shared/mod.js";
import { getCurrentPhase } from "../../shared/sf-phase-state.js";
import { delay } from "../atomic-write.js";
import {
buildSiftEnv,
ensureSiftRuntimeDirs,
@ -1109,7 +1110,7 @@ async function waitForFile(filePath, signal, timeoutMs = 30 * 60 * 1000) {
while (Date.now() - started < timeoutMs) {
if (signal?.aborted) return false;
if (fs.existsSync(filePath)) return true;
await new Promise((resolve) => setTimeout(resolve, 150));
await delay(150);
}
return false;
}

View file

@ -7,9 +7,9 @@
*/
import { execFile as execFileCb } from "node:child_process";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { promisify } from "node:util";
import { sfHome } from "../sf-home.js";
const execFile = promisify(execFileCb);
// ============================================================================
@ -20,9 +20,8 @@ export function encodeCwd(cwd) {
// prefixes cannot leak into the isolation path.
return Buffer.from(cwd, "utf8").toString("base64url");
}
const sfHome = process.env.SF_HOME || path.join(os.homedir(), ".sf");
function getIsolationBaseDir(cwd, taskId) {
return path.join(sfHome, "wt", encodeCwd(cwd), taskId);
return path.join(sfHome(), "wt", encodeCwd(cwd), taskId);
}
// Track active isolation dirs for cleanup on exit
const activeIsolations = new Set();

View file

@ -9,6 +9,7 @@
import { loadFile, parseSummary } from "./files.js";
import { relSlicePath, resolveTaskFiles, resolveTasksDir } from "./paths.js";
import { extractMarkdownSection } from "./workflow-helpers.js";
/**
* Extract and format a slice summary as a compact excerpt.
@ -196,22 +197,6 @@ export function isSummaryCleanForSkip(content) {
}
}
/**
 * Escape every RegExp metacharacter in `value` so the result can be
 * embedded verbatim inside a regular-expression source string.
 */
function escapeRegExpLocal(value) {
  return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Extract the body of a `## <heading>` markdown section from `content`.
 * Returns the trimmed text up to (not including) the next `## ` heading,
 * or null when the heading is not found.
 */
function extractMarkdownSectionLocal(content, heading) {
  const headingRe = new RegExp(`^## ${escapeRegExpLocal(heading)}\\s*$`, "m");
  const hit = headingRe.exec(content);
  if (hit === null) return null;
  const body = content.slice(hit.index + hit[0].length);
  const next = /^##\s+/m.exec(body);
  const sliceEnd = next === null ? body.length : next.index;
  return body.slice(0, sliceEnd).trim();
}
/**
* Extract key sections from a slice PLAN.md for use in task execution prompts.
* Returns Goal, Demo, Verification, and Observability sections as a compact excerpt.
@ -229,8 +214,8 @@ export function extractSliceExecutionExcerpt(content, relPath) {
const lines = content.split("\n");
const goalLine = lines.find((line) => line.startsWith("**Goal:**"))?.trim();
const demoLine = lines.find((line) => line.startsWith("**Demo:**"))?.trim();
const verification = extractMarkdownSectionLocal(content, "Verification");
const observability = extractMarkdownSectionLocal(
const verification = extractMarkdownSection(content, "Verification");
const observability = extractMarkdownSection(
content,
"Observability / Diagnostics",
);

View file

@ -14,6 +14,7 @@
* - Session-end flush: before unit completes, pending syncs are flushed (best-effort)
*/
import { delay } from "./atomic-write.js";
import { syncMemoryToSm } from "./sm-client.js";
/**
@ -191,7 +192,7 @@ async function trySyncWithRetry(item, attempt = 0) {
// Exponential backoff: 1s, 2s, 4s
const delayMs = BACKOFF_BASE_MS * 2 ** attempt;
await new Promise((resolve) => setTimeout(resolve, delayMs));
await delay(delayMs);
return trySyncWithRetry(item, attempt + 1);
}

View file

@ -5,9 +5,10 @@
* SUMMARY.md + UAT.md to disk, then writes the slice row to DB in a
* transaction, toggles the roadmap checkbox, and invalidates caches.
*/
import { promises as fs, constants as fsConstants, mkdirSync } from "node:fs";
import { dirname, join } from "node:path";
import { mkdirSync } from "node:fs";
import { join } from "node:path";
import { atomicWriteAsync } from "../atomic-write.js";
import { ensureWritableParent, errorMessage } from "./tool-helpers.js";
import { clearParseCache, extractUatType } from "../files.js";
import { getGatesForTurn } from "../gate-registry.js";
import { renderRoadmapCheckboxes } from "../markdown-renderer.js";
@ -39,14 +40,6 @@ import { logError, logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
async function ensureWritableParent(filePath) {
const parentDir = dirname(filePath);
await fs.mkdir(parentDir, { recursive: true });
await fs.access(parentDir, fsConstants.W_OK);
}
function errorMessage(error) {
return error instanceof Error ? error.message : String(error);
}
function yamlScalar(value) {
if (/^[A-Za-z0-9_.-]+$/.test(value)) return value;
return JSON.stringify(value);

View file

@ -5,9 +5,10 @@
* task row to DB in a transaction, toggles the plan checkbox, and invalidates
* caches.
*/
import { promises as fs, constants as fsConstants, mkdirSync } from "node:fs";
import { dirname, join } from "node:path";
import { mkdirSync } from "node:fs";
import { join } from "node:path";
import { atomicWriteAsync } from "../atomic-write.js";
import { ensureWritableParent, errorMessage } from "./tool-helpers.js";
import { clearParseCache } from "../files.js";
import { getGatesForTurn } from "../gate-registry.js";
import { renderPlanCheckboxes } from "../markdown-renderer.js";
@ -159,14 +160,6 @@ function normalizeCompleteTaskParams(params) {
),
};
}
async function ensureWritableParent(filePath) {
const parentDir = dirname(filePath);
await fs.mkdir(parentDir, { recursive: true });
await fs.access(parentDir, fsConstants.W_OK);
}
function errorMessage(error) {
return error instanceof Error ? error.message : String(error);
}
async function writeSummaryBeforeDb(filePath, content) {
try {
await ensureWritableParent(filePath);

View file

@ -20,10 +20,8 @@ import {
normalizePlanningText,
normalizePlanningTextArray,
} from "../validation.js";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
import { scaffoldMilestoneSlices } from "../workflow-templates.js";
function validateRiskEntries(value) {
@ -531,23 +529,14 @@ export async function handlePlanMilestone(rawParams, basePath) {
}
}
// ── Post-mutation hook: projections, manifest, event log ───────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "plan-milestone",
params: { milestoneId: params.milestoneId },
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`plan-milestone post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "plan-milestone",
params: { milestoneId: params.milestoneId },
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "plan-milestone" });
return {
milestoneId: params.milestoneId,
title: params.title,

View file

@ -19,10 +19,8 @@ import { invalidateStateCache } from "../state.js";
import { isClosedStatus } from "../status-guards.js";
import { taskFrontmatterFromRecord } from "../task-frontmatter.js";
import { isNonEmptyString, normalizePlanningText } from "../validation.js";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
const PLANNING_MEETING_REQUIRED_MESSAGE =
"planningMeeting must be a populated object — write at least 2-3 perspectives. Skipping is not allowed.";
@ -386,23 +384,14 @@ export async function handlePlanSlice(rawParams, basePath) {
invalidateStateCache();
clearParseCache();
// ── Post-mutation hook: projections, manifest, event log ─────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "plan-slice",
params: { milestoneId: params.milestoneId, sliceId: params.sliceId },
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`plan-slice post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "plan-slice",
params: { milestoneId: params.milestoneId, sliceId: params.sliceId },
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "plan-slice" });
return {
milestoneId: params.milestoneId,
sliceId: params.sliceId,

View file

@ -15,10 +15,8 @@ import {
normalizePlanningText,
normalizePlanningTextArray,
} from "../validation.js";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
function validateParams(params) {
if (!isNonEmptyString(params?.milestoneId))
@ -150,27 +148,18 @@ export async function handlePlanTask(rawParams, basePath) {
invalidateStateCache();
clearParseCache();
// ── Post-mutation hook: projections, manifest, event log ─────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "plan-task",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
taskId: params.taskId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`plan-task post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "plan-task",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
taskId: params.taskId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "plan-task" });
return {
milestoneId: params.milestoneId,
sliceId: params.sliceId,

View file

@ -20,10 +20,8 @@ import {
import { invalidateStateCache } from "../state.js";
import { isClosedStatus } from "../status-guards.js";
import { isNonEmptyString } from "../validation.js";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
function validateParams(params) {
if (!isNonEmptyString(params?.milestoneId))
@ -280,26 +278,17 @@ export async function handleReassessRoadmap(rawParams, basePath) {
invalidateStateCache();
clearParseCache();
// ── Post-mutation hook: projections, manifest, event log ─────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "reassess-roadmap",
params: {
milestoneId: params.milestoneId,
completedSliceId: params.completedSliceId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`reassess-roadmap post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "reassess-roadmap",
params: {
milestoneId: params.milestoneId,
completedSliceId: params.completedSliceId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "reassess-roadmap" });
return {
milestoneId: params.milestoneId,
completedSliceId: params.completedSliceId,

View file

@ -27,17 +27,12 @@ import {
} from "../sf-db.js";
import { invalidateStateCache } from "../state.js";
import { isClosedStatus } from "../status-guards.js";
import { appendEvent } from "../workflow-events.js";
import { isNonEmptyString } from "../validation.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
export async function handleReopenMilestone(params, basePath) {
// ── Validate required fields ────────────────────────────────────────────
if (
!params.milestoneId ||
typeof params.milestoneId !== "string" ||
params.milestoneId.trim() === ""
) {
if (!isNonEmptyString(params.milestoneId)) {
return { error: "milestoneId is required and must be a non-empty string" };
}
// ── Guards + DB writes inside a single transaction (prevents TOCTOU) ───
@ -109,28 +104,19 @@ export async function handleReopenMilestone(params, basePath) {
}
clearPathCache();
// ── Post-mutation hook ───────────────────────────────────────────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "reopen-milestone",
params: {
milestoneId: params.milestoneId,
reason: params.reason ?? null,
slicesReset: slicesResetCount,
tasksReset: tasksResetCount,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`reopen-milestone post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "reopen-milestone",
params: {
milestoneId: params.milestoneId,
reason: params.reason ?? null,
slicesReset: slicesResetCount,
tasksReset: tasksResetCount,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "reopen-milestone" });
return {
milestoneId: params.milestoneId,
slicesReset: slicesResetCount,

View file

@ -22,24 +22,15 @@ import {
} from "../sf-db.js";
import { invalidateStateCache } from "../state.js";
import { isClosedStatus } from "../status-guards.js";
import { appendEvent } from "../workflow-events.js";
import { isNonEmptyString } from "../validation.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
export async function handleReopenSlice(params, basePath) {
// ── Validate required fields ────────────────────────────────────────────
if (
!params.sliceId ||
typeof params.sliceId !== "string" ||
params.sliceId.trim() === ""
) {
if (!isNonEmptyString(params.sliceId)) {
return { error: "sliceId is required and must be a non-empty string" };
}
if (
!params.milestoneId ||
typeof params.milestoneId !== "string" ||
params.milestoneId.trim() === ""
) {
if (!isNonEmptyString(params.milestoneId)) {
return { error: "milestoneId is required and must be a non-empty string" };
}
// ── Guards + DB writes inside a single transaction (prevents TOCTOU) ───
@ -112,28 +103,19 @@ export async function handleReopenSlice(params, basePath) {
}
clearPathCache();
// ── Post-mutation hook ───────────────────────────────────────────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "reopen-slice",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
reason: params.reason ?? null,
tasksReset: tasksResetCount,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`reopen-slice post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "reopen-slice",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
reason: params.reason ?? null,
tasksReset: tasksResetCount,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "reopen-slice" });
return {
milestoneId: params.milestoneId,
sliceId: params.sliceId,

View file

@ -20,31 +20,18 @@ import {
} from "../sf-db.js";
import { invalidateStateCache } from "../state.js";
import { isClosedStatus } from "../status-guards.js";
import { appendEvent } from "../workflow-events.js";
import { isNonEmptyString } from "../validation.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
export async function handleReopenTask(params, basePath) {
// ── Validate required fields ────────────────────────────────────────────
if (
!params.taskId ||
typeof params.taskId !== "string" ||
params.taskId.trim() === ""
) {
if (!isNonEmptyString(params.taskId)) {
return { error: "taskId is required and must be a non-empty string" };
}
if (
!params.sliceId ||
typeof params.sliceId !== "string" ||
params.sliceId.trim() === ""
) {
if (!isNonEmptyString(params.sliceId)) {
return { error: "sliceId is required and must be a non-empty string" };
}
if (
!params.milestoneId ||
typeof params.milestoneId !== "string" ||
params.milestoneId.trim() === ""
) {
if (!isNonEmptyString(params.milestoneId)) {
return { error: "milestoneId is required and must be a non-empty string" };
}
// ── Guards + DB write inside a single transaction (prevents TOCTOU) ────
@ -110,28 +97,19 @@ export async function handleReopenTask(params, basePath) {
}
clearPathCache();
// ── Post-mutation hook ───────────────────────────────────────────────────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "reopen-task",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
taskId: params.taskId,
reason: params.reason ?? null,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`reopen-task post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "reopen-task",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
taskId: params.taskId,
reason: params.reason ?? null,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "reopen-task" });
return {
milestoneId: params.milestoneId,
sliceId: params.sliceId,

View file

@ -22,10 +22,8 @@ import {
normalizePlanningText,
normalizePlanningTextArray,
} from "../validation.js";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
import { runPostMutationHook } from "./tool-helpers.js";
function validateParams(params) {
if (!isNonEmptyString(params?.milestoneId))
@ -311,27 +309,18 @@ export async function handleReplanSlice(rawParams, basePath) {
invalidateStateCache();
clearParseCache();
// ── Post-mutation hook: projections, manifest, event log ─────
try {
await renderAllProjections(basePath, params.milestoneId);
writeManifest(basePath);
appendEvent(basePath, {
cmd: "replan-slice",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
blockerTaskId: params.blockerTaskId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
});
} catch (hookErr) {
logWarning(
"tool",
`replan-slice post-mutation hook warning: ${hookErr.message}`,
);
}
await runPostMutationHook(basePath, params.milestoneId, {
cmd: "replan-slice",
params: {
milestoneId: params.milestoneId,
sliceId: params.sliceId,
blockerTaskId: params.blockerTaskId,
},
ts: new Date().toISOString(),
actor: "agent",
actor_name: params.actorName,
trigger_reason: params.triggerReason,
}, { caller: "replan-slice" });
return {
milestoneId: params.milestoneId,
sliceId: params.sliceId,

View file

@ -0,0 +1,71 @@
/**
 * tool-helpers.js — Shared low-level utilities for SF tool handlers.
*
* Purpose: consolidate helpers that appear in multiple tool handler files
* (complete-task.js, complete-slice.js, etc.) to avoid silent divergence.
*
* Consumer: complete-task.js, complete-slice.js, and any future tool that
* writes summary files or formats error messages.
*/
import { promises as fs, constants as fsConstants } from "node:fs";
import { dirname } from "node:path";
import { appendEvent } from "../workflow-events.js";
import { logWarning } from "../workflow-logger.js";
import { writeManifest } from "../workflow-manifest.js";
import { renderAllProjections } from "../workflow-projections.js";
/**
 * Ensure the parent directory of filePath exists and is writable.
 * Missing directories are created with a recursive mkdir before the
 * writability check runs.
 *
 * Purpose: guard summary-file writes so the error message names the directory
 * rather than producing a cryptic ENOENT or EACCES on the file itself.
 *
 * Consumer: complete-task.js, complete-slice.js writeSummaryBeforeDb helpers.
 */
export async function ensureWritableParent(filePath) {
  const parent = dirname(filePath);
  // Create the directory chain first, then verify we can actually write there.
  await fs.mkdir(parent, { recursive: true });
  await fs.access(parent, fsConstants.W_OK);
}
/**
 * Extract a human-readable message from an unknown caught value.
 *
 * Purpose: produce a string regardless of whether the thrown value is an
 * Error instance or a plain string/object, so callers don't need a ternary.
 *
 * Consumer: complete-task.js, complete-slice.js error formatting.
 */
export function errorMessage(error) {
  if (error instanceof Error) {
    return error.message;
  }
  return String(error);
}
/**
 * Run the standard post-mutation hook: render all projections, write the
 * manifest, and append an event to the workflow log.
 *
 * Purpose: the three-step sequence (renderAllProjections + writeManifest +
 * appendEvent) appears in every tool that mutates planning state. Extracting
 * it here prevents the steps from drifting independently across tool files.
 *
 * Hook failures are logged as warnings and never rethrown.
 *
 * Consumer: plan-task.js, plan-slice.js, plan-milestone.js, replan-slice.js,
 * reassess-roadmap.js, reopen-slice.js, reopen-task.js, reopen-milestone.js.
 *
 * @param {string} basePath - Project base path.
 * @param {string} milestoneId - Milestone ID for projection scoping.
 * @param {object} event - Full event object passed to appendEvent.
 * @param {{ caller?: string }} [opts] - caller is the tool name used in the warning message.
 */
export async function runPostMutationHook(
  basePath,
  milestoneId,
  event,
  { caller = "tool" } = {},
) {
  try {
    await renderAllProjections(basePath, milestoneId);
    writeManifest(basePath);
    appendEvent(basePath, event);
  } catch (hookErr) {
    // errorMessage handles non-Error throwables — the previous `.message`
    // read produced "undefined" for plain-string throws.
    logWarning(
      "tool",
      `${caller} post-mutation hook warning: ${errorMessage(hookErr)}`,
    );
  }
}

View file

@ -10,7 +10,7 @@
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { dirname, join } from "node:path";
import {
buildChallengePrompt,
buildCompleteMilestonePrompt,
@ -264,33 +264,50 @@ function findMissingSummaries(basePath, mid) {
}
// ─── Rewrite Circuit Breaker ──────────────────────────────────────────────
const MAX_REWRITE_ATTEMPTS = 3;
// ─── DB-with-disk-fallback counter factory ───────────────────────────────
// Counters must survive session restarts (crash recovery, pause/resume).
// The factory returns { get, set, increment } backed by SQLite when available
// and a JSON file on disk otherwise. The methods are closures rather than
// `this`-bound shorthand methods, so they stay correct even when destructured
// or passed around as callbacks.
function makeDiskCounter(dbKey, diskPath) {
  // Read the persisted count; any parse/IO failure is treated as "no count yet".
  const get = () => {
    if (isDbAvailable()) return getRuntimeCounter(dbKey);
    try {
      const data = JSON.parse(readFileSync(diskPath, "utf-8"));
      return typeof data.count === "number" ? data.count : 0;
    } catch {
      return 0;
    }
  };
  // Persist an absolute count, creating the parent directory for the disk path.
  const set = (count) => {
    if (isDbAvailable()) {
      setRuntimeCounter(dbKey, count);
      return;
    }
    mkdirSync(dirname(diskPath), { recursive: true });
    writeFileSync(
      diskPath,
      JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n",
    );
  };
  // Read-modify-write increment; the disk path is two separate file
  // operations, so it is best-effort rather than crash-atomic.
  const increment = () => {
    if (isDbAvailable()) return incrementRuntimeCounter(dbKey);
    const count = get() + 1;
    set(count);
    return count;
  };
  return { get, set, increment };
}
// ─── Disk-persisted rewrite attempt counter ──────────────────────────────────
// The counter must survive session restarts (crash recovery, pause/resume,
// assisted mode). Storing it on the in-memory session object caused the circuit
// breaker to never trip — see https://github.com/singularity-forge/sf-run/issues/2203
// See https://github.com/singularity-forge/sf-run/issues/2203
// Disk location of the rewrite-attempt counter JSON file (the fallback store
// used by the counter helpers when the SQLite DB is unavailable).
function rewriteCountPath(basePath) {
  return join(sfRoot(basePath), "runtime", "rewrite-count.json");
}
export function getRewriteCount(basePath) {
if (isDbAvailable()) return getRuntimeCounter("rewrite-count");
try {
const data = JSON.parse(readFileSync(rewriteCountPath(basePath), "utf-8"));
return typeof data.count === "number" ? data.count : 0;
} catch {
return 0;
}
return makeDiskCounter("rewrite-count", rewriteCountPath(basePath)).get();
}
export function setRewriteCount(basePath, count) {
if (isDbAvailable()) {
setRuntimeCounter("rewrite-count", count);
return;
}
const filePath = rewriteCountPath(basePath);
mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true });
writeFileSync(
filePath,
JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n",
);
makeDiskCounter("rewrite-count", rewriteCountPath(basePath)).set(count);
}
// ─── Run-UAT dispatch counter (per-slice) ────────────────────────────────
// Caps run-uat dispatches to prevent infinite replay when verification
@ -301,27 +318,11 @@ function uatCountPath(basePath, mid, sid) {
}
export function getUatCount(basePath, mid, sid) {
const key = `uat-count:${mid}:${sid}`;
if (isDbAvailable()) return getRuntimeCounter(key);
try {
const data = JSON.parse(
readFileSync(uatCountPath(basePath, mid, sid), "utf-8"),
);
return typeof data.count === "number" ? data.count : 0;
} catch {
return 0;
}
return makeDiskCounter(key, uatCountPath(basePath, mid, sid)).get();
}
export function incrementUatCount(basePath, mid, sid) {
const key = `uat-count:${mid}:${sid}`;
if (isDbAvailable()) return incrementRuntimeCounter(key);
const count = getUatCount(basePath, mid, sid) + 1;
const filePath = uatCountPath(basePath, mid, sid);
mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true });
writeFileSync(
filePath,
JSON.stringify({ count, updatedAt: new Date().toISOString() }) + "\n",
);
return count;
return makeDiskCounter(key, uatCountPath(basePath, mid, sid)).increment();
}
// ─── Helpers ─────────────────────────────────────────────────────────────
/**

View file

@ -11,6 +11,7 @@
import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { delay } from "../atomic-write.js";
const DEFAULT_LATENCY_PROBABILITY = 0.05;
const DEFAULT_PARTIAL_FAILURE_PROBABILITY = 0.03;
@ -20,10 +21,6 @@ const DEFAULT_MAX_LATENCY_MS = 5000;
const DEFAULT_DISK_STRESS_MB = 50;
const DEFAULT_MEMORY_STRESS_MB = 100;
// Promise-based sleep: resolves (with no value) after `ms` milliseconds.
function randomDelay(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
// Uniformly sample a float from [min, max) by scaling the unit interval.
function randomInRange(min, max) {
  const span = max - min;
  return min + Math.random() * span;
}
@ -92,12 +89,12 @@ export class ChaosMonkey {
// 1. Latency injection
if (Math.random() < this.latencyProbability) {
const delay = Math.floor(randomInRange(100, this.maxLatencyMs));
const latencyMs = Math.floor(randomInRange(100, this.maxLatencyMs));
console.error(
`[CHAOS MONKEY] Injecting ${delay}ms latency during phase: ${phase}`,
`[CHAOS MONKEY] Injecting ${latencyMs}ms latency during phase: ${phase}`,
);
this._injected.push({ type: "latency", phase, delay });
await randomDelay(delay);
this._injected.push({ type: "latency", phase, delay: latencyMs });
await delay(latencyMs);
}
// 2. Partial failure (non-fatal error throw)

View file

@ -9,9 +9,9 @@
 * Never throws — any I/O failure returns 0.
*/
import { existsSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { readAllSelfFeedback, recordSelfFeedback } from "./self-feedback.js";
import { sfHome } from "./sf-home.js";
// ─── Constants ────────────────────────────────────────────────────────────────
const SEVERITY_ORDER = ["low", "medium", "high", "critical"];
@ -21,8 +21,7 @@ const THRESHOLD_REPOS = 2;
const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
// ─── Helpers ──────────────────────────────────────────────────────────────────
function getUpstreamLogPath() {
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
return join(sfHome, "agent", "upstream-feedback.jsonl");
return join(sfHome(), "agent", "upstream-feedback.jsonl");
}
function isForgeRepo(basePath) {
try {

View file

@ -16,9 +16,9 @@ import { isDbAvailable } from "./sf-db.js";
/**
* Escape regex special characters for safe use in RegExp.
* @internal Helper
* Purpose: shared utility for markdown section extraction.
*/
function escapeRegExp(value) {
export function escapeRegExp(value) {
return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
@ -32,9 +32,9 @@ function oneLine(text) {
/**
* Extract a markdown section by heading.
* @internal Helper
* Purpose: shared utility for markdown section extraction.
*/
function extractMarkdownSection(content, heading) {
export function extractMarkdownSection(content, heading) {
const match = new RegExp(`^## ${escapeRegExp(heading)}\\s*$`, "m").exec(
content,
);

View file

@ -23,7 +23,7 @@ import {
unlinkSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { sfHome } from './sf-home.js';
import { extname, join, sep as pathSep, resolve } from "node:path";
import { parse as parseYaml } from "yaml";
import { validateDefinition } from "./definition-loader.js";
@ -32,7 +32,6 @@ import { validateDefinition } from "./definition-loader.js";
const MAX_RESPONSE_BYTES = 256 * 1024;
const FETCH_TIMEOUT_MS = 15_000;
const PROVENANCE_FILE = ".installed.json";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
// Path of the `.installed.json` provenance marker inside an install directory.
function provenancePath(dir) {
  return join(dir, PROVENANCE_FILE);
}
@ -54,7 +53,7 @@ function writeProvenance(dir, data) {
);
}
export function globalInstallDir() {
return join(sfHome, "workflows");
return join(sfHome(), "workflows");
}
export function projectInstallDir(basePath) {
return join(basePath, ".sf", "workflows");

View file

@ -13,19 +13,18 @@
* Precedence: project > global > bundled. Same-named file wins.
*/
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { homedir } from "node:os";
import { sfHome } from './sf-home.js';
import { basename, extname, join } from "node:path";
import { parse as parseYaml } from "yaml";
import { loadRegistry } from "./workflow-templates.js";
// ─── Path resolution ─────────────────────────────────────────────────────
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
function resolveBundledDir() {
const moduleDir = import.meta.dirname;
const local = join(moduleDir, "workflow-templates");
if (existsSync(local)) return local;
const agentSfDir = join(
sfHome,
sfHome(),
"agent",
"extensions",
"sf",
@ -35,7 +34,7 @@ function resolveBundledDir() {
return local;
}
function globalPluginsDir() {
return join(sfHome, "workflows");
return join(sfHome(), "workflows");
}
function projectPluginsDir(basePath) {
return join(basePath, ".sf", "workflows");

View file

@ -5,8 +5,8 @@
* alias, or trigger-keyword matching against user input.
*/
import { existsSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { sfHome } from "./sf-home.js";
const __extensionDir = resolveSfExtensionDir();
const registryPath = join(
@ -21,8 +21,7 @@ const registryPath = join(
function resolveSfExtensionDir() {
const moduleDir = import.meta.dirname;
if (existsSync(join(moduleDir, "workflow-templates"))) return moduleDir;
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const agentSfDir = join(sfHome, "agent", "extensions", "sf");
const agentSfDir = join(sfHome(), "agent", "extensions", "sf");
if (existsSync(join(agentSfDir, "workflow-templates"))) return agentSfDir;
return moduleDir;
}

View file

@ -1,10 +1,6 @@
import { existsSync, readFileSync, realpathSync, statSync } from "node:fs";
import { homedir } from "node:os";
import { join, resolve } from "node:path";
function sfHome() {
return process.env.SF_HOME || join(homedir(), ".sf");
}
import { sfHome } from "./sf-home.js";
export function normalizeWorktreePathForCompare(path) {
let normalized;
try {

View file

@ -12,10 +12,10 @@
* SLICE_BRANCH_RE) remain for backwards compatibility with legacy branches.
*/
import { existsSync, readFileSync, realpathSync, utimesSync } from "node:fs";
import { homedir } from "node:os";
import { join, resolve } from "node:path";
import { GitService, writeIntegrationBranch } from "./git-service.js";
import { loadEffectiveSFPreferences } from "./preferences.js";
import { sfHome } from "./sf-home.js";
import { detectWorktreeName, findWorktreeSegment } from "./worktree-detect.js";
export { MergeConflictError } from "./git-service.js";
@ -106,11 +106,9 @@ export function resolveProjectRoot(basePath) {
// Layer 2: Guard against resolving to the user's home directory.
// When .sf is a symlink into ~/.sf/projects/<hash>, the resolved path
// contains /.sf/ at the user-level boundary. Slicing there yields ~ — wrong.
const sfHome = normalizePathForCompare(
process.env.SF_HOME || join(homedir(), ".sf"),
);
const sfHomePath = normalizePathForCompare(sfHome());
const candidateSfPath = normalizePathForCompare(join(candidate, ".sf"));
if (candidateSfPath === sfHome || candidateSfPath.startsWith(sfHome + "/")) {
if (candidateSfPath === sfHomePath || candidateSfPath.startsWith(sfHomePath + "/")) {
// The candidate is the home directory (or within it in a way that .sf
// maps to the user-level SF dir). Try to recover the real project root
// from the worktree's .git file.