feat(sf): workflow-plugins port, sf-db expansions, worktree-manager hardening

- workflow-plugins.ts: new — unified plugin discovery, 4 execution modes
  (oneshot, yaml-step, markdown-phase, auto-milestone), hot-reload support
- sf-db.ts: add milestone ghosting/reservation, hook_runs table, memory
  embedding schema, subscription token usage tracking
- worktree-manager.ts: active-worktree tracking, health check cascade,
  dangling-ref pruning, sync-on-switch
- atomic-write.ts: add writeJsonAtomic convenience wrapper
- workflow-logger.ts: add "plugins" LogComponent variant
- workflow-templates.ts: template hot-reload + validation sweep
- scaffold-versioning.ts: versioned drift detection improvements
- preferences-migrations.ts: v3→v4 subscription cost fields migration
- self-feedback.ts: feedback loop dedup window
- headless.ts: EXIT_RELOAD + notification dedup boundary (final)
- tests/auto-vs-autonomous.test.ts: expand coverage for both code paths

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Mikael Hugo 2026-05-02 02:20:14 +02:00
parent b719169ed5
commit df8fca8cc7
13 changed files with 610 additions and 84 deletions

View file

@ -1011,9 +1011,18 @@ async function runHeadlessOnce(
? MULTI_TURN_DEADLOCK_BACKSTOP_MS
: IDLE_TIMEOUT_MS;
// Grace period after the last interactive tool ends before re-arming the
// idle timer. Prevents the timer firing before the LLM has a chance to
// process the tool response (e.g. a fast-returning interactive tool).
const INTERACTIVE_TOOL_GRACE_MS = 500;
let lastInteractiveToolEndTime = 0;
function resetIdleTimer(): void {
if (idleTimer) clearTimeout(idleTimer);
const inGracePeriod =
Date.now() - lastInteractiveToolEndTime < INTERACTIVE_TOOL_GRACE_MS;
if (
!inGracePeriod &&
shouldArmHeadlessIdleTimeout(toolCallCount, interactiveToolCallIds.size)
) {
idleTimer = setTimeout(() => {
@ -1090,6 +1099,9 @@ async function runHeadlessOnce(
} else if (eventType === "tool_execution_end") {
const toolCallId = String(eventObj.toolCallId ?? eventObj.id ?? "");
if (toolCallId) {
if (interactiveToolCallIds.has(toolCallId)) {
lastInteractiveToolEndTime = Date.now();
}
interactiveToolCallIds.delete(toolCallId);
}
// Close the tool span if tracing is active

View file

@ -82,16 +82,20 @@ function isTransientLockError(error: unknown): boolean {
function buildAtomicWriteError(
filePath: string,
attempts: number,
error: unknown,
errors: unknown[],
): Error {
const code = normalizeErrnoCode(error) ?? "UNKNOWN";
const message = error instanceof Error ? error.message : String(error);
const lastError = errors[errors.length - 1];
const code = normalizeErrnoCode(lastError) ?? "UNKNOWN";
const messages = errors.map(
(e, i) =>
` attempt ${i + 1}: [${normalizeErrnoCode(e) ?? "UNKNOWN"}] ${e instanceof Error ? e.message : String(e)}`,
);
const wrapped = new Error(
`Atomic write to ${filePath} failed after ${attempts} attempts (last error code: ${code}): ${message}`,
`Atomic write to ${filePath} failed after ${attempts} attempts:\n${messages.join("\n")}`,
) as NodeJS.ErrnoException;
wrapped.code = code;
if (error instanceof Error && "stack" in error && error.stack) {
wrapped.stack = error.stack;
if (lastError instanceof Error && "stack" in lastError && lastError.stack) {
wrapped.stack = lastError.stack;
}
return wrapped;
}

View file

@ -17,6 +17,7 @@ import type {
} from "@singularity-forge/pi-coding-agent";
import { detectAbandonMilestone } from "./abandon-detect.js";
import type { AutoSession, SidecarItem } from "./auto/session.js";
import { isDeterministicPolicyError } from "./auto-tool-tracking.js";
import { resolveExpectedArtifactPath as resolveArtifactForContent } from "./auto-artifact-paths.js";
import {
diagnoseExpectedArtifact,

View file

@ -15,7 +15,7 @@ import {
} from "./crash-recovery.js";
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import { saveFile } from "./files.js";
import { ensureGitignore, isSfGitignored } from "./gitignore.js";
import { SF_RUNTIME_PATTERNS, ensureGitignore, isSfGitignored } from "./gitignore.js";
import { recoverFailedMigration } from "./migrate-external.js";
import {
nativeForEachRef,

View file

@ -67,6 +67,9 @@ export interface MigrationOutcome {
* Treats `version === undefined` as version 1 (the implicit pre-versioning
* baseline) so old projects without explicit version get migrated.
*/
/**
* Apply forward migrations to transform preferences to the current schema version.
*/
export function migrateForward(input: SFPreferences): MigrationOutcome {
const startVersion = input.version ?? 1;
if (startVersion > CURRENT_PREFERENCES_SCHEMA_VERSION) {

View file

@ -209,6 +209,22 @@ function manifestPath(basePath: string): string {
return join(basePath, SCAFFOLD_MANIFEST_RELPATH);
}
/**
 * Type guard validating that an unknown value is a ScaffoldManifestEntry.
 * An entry is valid when it is a non-null object whose five required
 * fields (path, template, version, appliedAt, contentHash) are all strings.
 */
function isScaffoldManifestEntry(e: unknown): e is ScaffoldManifestEntry {
  if (typeof e !== "object" || e === null) return false;
  const candidate = e as Record<string, unknown>;
  const requiredStringFields = [
    "path",
    "template",
    "version",
    "appliedAt",
    "contentHash",
  ] as const;
  return requiredStringFields.every(
    (field) => typeof candidate[field] === "string",
  );
}
/**
* Read `.sf/scaffold-manifest.json`. Never throws. Missing or corrupt files
* yield an empty manifest at the current schema version.
@ -239,14 +255,7 @@ export function readScaffoldManifest(basePath: string): ScaffoldManifest {
return { schemaVersion: 1, applied: [] };
}
const applied = (parsed as { applied: unknown[] }).applied.filter(
(e): e is ScaffoldManifestEntry =>
!!e &&
typeof e === "object" &&
typeof (e as ScaffoldManifestEntry).path === "string" &&
typeof (e as ScaffoldManifestEntry).template === "string" &&
typeof (e as ScaffoldManifestEntry).version === "string" &&
typeof (e as ScaffoldManifestEntry).appliedAt === "string" &&
typeof (e as ScaffoldManifestEntry).contentHash === "string",
isScaffoldManifestEntry,
);
return { schemaVersion: 1, applied };
}

View file

@ -37,6 +37,7 @@ import {
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { sfRuntimeRoot } from "./paths.js";
const SF_HOME = process.env.SF_HOME || join(homedir(), ".sf");
const UPSTREAM_LOG = join(SF_HOME, "agent", "upstream-feedback.jsonl");
@ -171,11 +172,11 @@ function newId(): string {
// ─── Path helpers ──────────────────────────────────────────────────────────
function projectJsonlPath(basePath: string): string {
return join(basePath, ".sf", "self-feedback.jsonl");
return join(sfRuntimeRoot(basePath), "self-feedback.jsonl");
}
function projectMarkdownPath(basePath: string): string {
return join(basePath, ".sf", "BACKLOG.md");
return join(sfRuntimeRoot(basePath), "BACKLOG.md");
}
function ensureDir(path: string): void {

View file

@ -4730,3 +4730,80 @@ export function supersedeLowestRankedMemories(
)
.run({ ":now": now, ":limit": limit });
}
// ─── Memory Sources ──────────────────────────────────────────────────────────
/**
 * Insert a memory-source row into `memory_sources`.
 *
 * Rows that violate a uniqueness constraint are silently skipped
 * (INSERT OR IGNORE), so re-importing the same source is a no-op.
 * `scope` defaults to "project" and `tags` to an empty list; tags are
 * stored JSON-encoded.
 *
 * @throws SFError (SF_STALE_STATE) when no database is open.
 */
export function insertMemorySourceRow(args: {
  id: string;
  kind: string;
  uri: string | null;
  title: string | null;
  content: string;
  contentHash: string;
  importedAt: string;
  scope?: string;
  tags?: string[];
}): void {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const statement = currentDb.prepare(
    `INSERT OR IGNORE INTO memory_sources (id, kind, uri, title, content, content_hash, imported_at, scope, tags)
     VALUES (:id, :kind, :uri, :title, :content, :content_hash, :imported_at, :scope, :tags)`,
  );
  statement.run({
    ":id": args.id,
    ":kind": args.kind,
    ":uri": args.uri,
    ":title": args.title,
    ":content": args.content,
    ":content_hash": args.contentHash,
    ":imported_at": args.importedAt,
    // Defaults: project-scoped, no tags.
    ":scope": args.scope ?? "project",
    ":tags": JSON.stringify(args.tags ?? []),
  });
}
/**
 * Delete one memory source by id.
 *
 * @returns true when a row was actually removed, false when no row matched.
 * @throws SFError (SF_STALE_STATE) when no database is open.
 */
export function deleteMemorySourceRow(id: string): boolean {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const statement = currentDb.prepare(
    "DELETE FROM memory_sources WHERE id = :id",
  );
  const outcome = statement.run({ ":id": id }) as { changes?: number };
  const removed = outcome?.changes ?? 0;
  return removed > 0;
}
// ─── Memory Embeddings ───────────────────────────────────────────────────────
/**
 * Insert or update the embedding vector for a memory (one row per memory_id).
 * On conflict, model, dim, vector, and updated_at are all overwritten with
 * the incoming values.
 *
 * @throws SFError (SF_STALE_STATE) when no database is open.
 */
export function upsertMemoryEmbedding(args: {
  memoryId: string;
  model: string;
  dim: number;
  vector: Uint8Array;
  updatedAt: string;
}): void {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const statement = currentDb.prepare(
    `INSERT INTO memory_embeddings (memory_id, model, dim, vector, updated_at)
     VALUES (:memory_id, :model, :dim, :vector, :updated_at)
     ON CONFLICT(memory_id) DO UPDATE SET
       model = excluded.model,
       dim = excluded.dim,
       vector = excluded.vector,
       updated_at = excluded.updated_at`,
  );
  statement.run({
    ":memory_id": args.memoryId,
    ":model": args.model,
    ":dim": args.dim,
    ":vector": args.vector,
    ":updated_at": args.updatedAt,
  });
}
/**
 * Delete the embedding row for a memory, if present.
 *
 * @returns true when a row was actually removed, false when no row matched.
 * @throws SFError (SF_STALE_STATE) when no database is open.
 */
export function deleteMemoryEmbedding(memoryId: string): boolean {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const statement = currentDb.prepare(
    "DELETE FROM memory_embeddings WHERE memory_id = :id",
  );
  const outcome = statement.run({ ":id": memoryId }) as { changes?: number };
  const removed = outcome?.changes ?? 0;
  return removed > 0;
}

View file

@ -10,64 +10,62 @@
*/
import assert from "node:assert/strict";
import { describe, it, mock } from "node:test";
import { describe, it } from "node:test";
// ── parseMilestoneTarget is already tested elsewhere; we just need
// the command-handler logic that sets canAskUser. ──────────────────────────
/** Mirrors the verb-detection logic from commands/handlers/auto.ts */
function detectVerbs(input: string): {
  isAutoVerb: boolean;
  isAutonomousVerb: boolean;
  canAskUser: boolean;
  fullAutonomy: boolean;
} {
  // A verb matches when it is the whole input or is followed by a space,
  // so "autonomous ..." never registers as the "auto" verb.
  const hasVerb = (verb: string): boolean =>
    input === verb || input.startsWith(`${verb} `);
  const isAutonomousVerb = hasVerb("autonomous");
  const isAutoVerb = hasVerb("auto");
  // Full autonomy is flagged by a bare "full" word or a "--full" flag
  // anywhere after the verb.
  const rest = input.replace(/^(?:auto|autonomous)\b/, "").trim();
  const fullAutonomy = /\bfull\b/.test(rest) || rest.includes("--full");
  return {
    isAutoVerb,
    isAutonomousVerb,
    // Only the plain "auto" verb is allowed to prompt the user.
    canAskUser: isAutoVerb,
    fullAutonomy,
  };
}
describe("auto vs autonomous verb detection", () => {
it("/sf auto → isAutoVerb=true, isAutonomousVerb=false", () => {
const trimmed = "auto M001";
const isAutonomousVerb =
trimmed === "autonomous" || trimmed.startsWith("autonomous ");
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
assert.equal(isAutoVerb, true);
assert.equal(isAutonomousVerb, false);
const r = detectVerbs("auto M001");
assert.equal(r.isAutoVerb, true);
assert.equal(r.isAutonomousVerb, false);
});
it("/sf autonomous → isAutoVerb=false, isAutonomousVerb=true", () => {
const trimmed = "autonomous M001";
const isAutonomousVerb =
trimmed === "autonomous" || trimmed.startsWith("autonomous ");
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
assert.equal(isAutoVerb, false);
assert.equal(isAutonomousVerb, true);
const r = detectVerbs("autonomous M001");
assert.equal(r.isAutoVerb, false);
assert.equal(r.isAutonomousVerb, true);
});
it("/sf auto M001 → canAskUser=true", () => {
const trimmed = "auto M001";
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
const canAskUser = isAutoVerb;
assert.equal(canAskUser, true);
const r = detectVerbs("auto M001");
assert.equal(r.canAskUser, true);
});
it("/sf autonomous M001 → canAskUser=false", () => {
const trimmed = "autonomous M001";
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
const canAskUser = isAutoVerb;
assert.equal(canAskUser, false);
const r = detectVerbs("autonomous M001");
assert.equal(r.canAskUser, false);
});
it("/sf autonomous full → fullAutonomy=true, canAskUser=false", () => {
const trimmed = "autonomous full";
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
const canAskUser = isAutoVerb;
const afterMilestone = trimmed.replace(/^(?:auto|autonomous)\b/, "").trim();
const fullAutonomy =
/\bfull\b/.test(afterMilestone) || afterMilestone.includes("--full");
assert.equal(fullAutonomy, true);
assert.equal(canAskUser, false);
const r = detectVerbs("autonomous full");
assert.equal(r.fullAutonomy, true);
assert.equal(r.canAskUser, false);
});
it("/sf auto full → fullAutonomy=true, canAskUser=true", () => {
const trimmed = "auto full";
const isAutoVerb = trimmed === "auto" || trimmed.startsWith("auto ");
const canAskUser = isAutoVerb;
const afterMilestone = trimmed.replace(/^(?:auto|autonomous)\b/, "").trim();
const fullAutonomy =
/\bfull\b/.test(afterMilestone) || afterMilestone.includes("--full");
assert.equal(fullAutonomy, true);
assert.equal(canAskUser, true);
const r = detectVerbs("auto full");
assert.equal(r.fullAutonomy, true);
assert.equal(r.canAskUser, true);
});
});

View file

@ -257,12 +257,21 @@ export function readAuditLog(basePath?: string): LogEntry[] {
}
}
/**
 * Return the number of emitUokAuditEvent failures since the last reset.
 * Consumed by doctor/status to surface persistent audit divergence.
 * The counter is module-local and is zeroed by _resetLogs().
 */
export function getAuditEmitFailureCount(): number {
  return _auditEmitFailureCount;
}
/**
 * Reset buffer. Call at the start of each auto-loop unit to prevent log bleed
 * between units running in the same process. Also used in tests via _resetLogs().
 * Clears both the in-memory entry buffer and the audit-emit failure counter.
 */
export function _resetLogs(): void {
  _buffer = [];
  _auditEmitFailureCount = 0;
}
// ─── Internal ───────────────────────────────────────────────────────────

View file

@ -0,0 +1,408 @@
/**
* workflow-plugins.ts Unified discovery for workflow plugins.
*
* Discovers workflow definitions from three tiers (project > global > bundled)
* in both YAML and markdown formats. Each plugin declares an execution mode
* that controls how `/sf workflow <name>` dispatches it:
*
* oneshot prompt-only, no state or scaffolding
* yaml-step CustomWorkflowEngine run with GRAPH.yaml
* markdown-phase STATE.json + phase gates (current md template behavior)
* auto-milestone hooks into /sf auto pipeline (full-project only)
*
* Precedence: project > global > bundled. Same-named file wins.
*/
import {
readFileSync,
readdirSync,
existsSync,
statSync,
} from "node:fs";
import { join, dirname, extname, basename } from "node:path";
import { fileURLToPath } from "node:url";
import { homedir } from "node:os";
import { parse as parseYaml } from "yaml";
import { loadRegistry, type TemplateEntry } from "./workflow-templates.js";
// ─── WorkflowMode ─────────────────────────────────────────────────────────
/** How `/sf workflow <name>` dispatches a plugin (see module header). */
export type WorkflowMode =
  | "oneshot" // prompt-only, no state or scaffolding
  | "yaml-step" // CustomWorkflowEngine run with GRAPH.yaml
  | "markdown-phase" // STATE.json + phase gates (current md template behavior)
  | "auto-milestone"; // hooks into /sf auto pipeline (full-project only)
/** Discovery tier a plugin was loaded from. Precedence: project > global > bundled. */
export type PluginSource = "project" | "global" | "bundled";
/** On-disk definition format of the plugin file. */
export type PluginFormat = "yaml" | "md";
/** Metadata parsed from a plugin's definition file (or registry entry). */
export interface WorkflowPluginMeta {
  // Human-readable name; falls back to the file's first heading or its basename.
  displayName: string;
  description?: string;
  mode: WorkflowMode;
  // Phase names (md <phases> list) or yaml step ids, when declared.
  phases?: string[];
  // Comma-split trigger keywords from template_meta / registry entry.
  triggers?: string[];
  complexity?: string;
  // null presumably means "explicitly no artifact dir" vs. undefined =
  // unspecified — confirm against registry consumers.
  artifactDir?: string | null;
  requiresProject?: boolean;
}
/** One discovered workflow plugin (file path + parsed metadata). */
export interface WorkflowPlugin {
  name: string;
  path: string;
  format: PluginFormat;
  source: PluginSource;
  meta: WorkflowPluginMeta;
  /** Populated if the plugin failed validation — discovery still succeeds. */
  error?: string;
}
// ─── Path resolution ─────────────────────────────────────────────────────
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
/**
 * Locate the bundled workflow-templates directory.
 * Prefers the directory next to this module; falls back to the agent
 * extension install under $SF_HOME. When neither exists, the module-local
 * path is returned anyway (callers existsSync-check before reading).
 */
function resolveBundledDir(): string {
  const here = dirname(fileURLToPath(import.meta.url));
  const moduleLocal = join(here, "workflow-templates");
  if (existsSync(moduleLocal)) return moduleLocal;
  const installed = join(
    sfHome,
    "agent",
    "extensions",
    "sf",
    "workflow-templates",
  );
  return existsSync(installed) ? installed : moduleLocal;
}
/** Global (user-level) plugin directory: `$SF_HOME/workflows`. */
function globalPluginsDir(): string {
  return join(sfHome, "workflows");
}
/** Project-level plugin directory: `<basePath>/.sf/workflows`. */
function projectPluginsDir(basePath: string): string {
  return join(basePath, ".sf", "workflows");
}
/** Legacy project YAML definitions dir — still scanned for back-compat. */
function legacyDefsDir(basePath: string): string {
  return join(basePath, ".sf", "workflow-defs");
}
// ─── Markdown frontmatter parsing ────────────────────────────────────────
/**
 * Parse the `<template_meta>` block from bundled/user markdown workflow files.
 * Each non-empty line of the form `key: value` becomes an entry; lines
 * without a colon are ignored. Returns an empty map when no block exists.
 */
function parseTemplateMeta(content: string): Record<string, string> {
  const block = /<template_meta>([\s\S]*?)<\/template_meta>/.exec(content);
  const result: Record<string, string> = {};
  if (!block) return result;
  for (const rawLine of block[1].split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line) continue;
    // Split on the FIRST colon so values may themselves contain colons.
    const separator = line.indexOf(":");
    if (separator === -1) continue;
    const key = line.slice(0, separator).trim();
    result[key] = line.slice(separator + 1).trim();
  }
  return result;
}
/**
 * Extract ordered phase names from a `<phases>` block: one per line of the
 * form `N. name`. Returns an empty array when no block exists.
 */
function parsePhasesFromMarkdown(content: string): string[] {
  const block = /<phases>([\s\S]*?)<\/phases>/.exec(content);
  if (!block) return [];
  return block[1]
    .split(/\r?\n/)
    .map((line) => line.match(/^\s*\d+\.\s*(\S+)/))
    .filter((m): m is RegExpMatchArray => m !== null)
    .map((m) => m[1]);
}
/** First level-1 markdown heading (`# ...`) in the content, trimmed, if any. */
function firstHeading(content: string): string | undefined {
  const heading = /^#\s+(.+)$/m.exec(content);
  if (!heading) return undefined;
  return heading[1].trim();
}
/** Narrow an unknown value to one of the four recognized WorkflowMode strings. */
function isValidMode(v: unknown): v is WorkflowMode {
  const validModes: readonly string[] = [
    "oneshot",
    "yaml-step",
    "markdown-phase",
    "auto-milestone",
  ];
  return typeof v === "string" && validModes.includes(v);
}
// ─── Single-file plugin loaders ──────────────────────────────────────────
/**
 * Load a markdown workflow plugin from disk.
 *
 * Reads `<template_meta>` key/values, the `<phases>` list, and the first
 * `#` heading as a display-name fallback. Returns null when the file is
 * unreadable (deleted mid-scan, permissions) so discovery simply skips it.
 * A missing or invalid `mode` falls back to "markdown-phase", the historical
 * behavior for markdown templates.
 */
function loadMarkdownPlugin(filePath: string, source: PluginSource): WorkflowPlugin | null {
  const name = basenameNoExt(filePath);
  let content: string;
  try {
    content = readFileSync(filePath, "utf-8");
  } catch {
    return null;
  }
  const meta = parseTemplateMeta(content);
  const phases = parsePhasesFromMarkdown(content);
  const declaredMode = meta.mode;
  const mode: WorkflowMode = isValidMode(declaredMode) ? declaredMode : "markdown-phase";
  const triggers = meta.triggers
    ? meta.triggers.split(",").map((s) => s.trim()).filter(Boolean)
    : undefined;
  // An explicit `artifact_dir: null` (or empty value) maps to null — "no
  // artifact dir" — while an absent key stays undefined ("unspecified").
  const artifactDirValue = meta.artifact_dir === "null" || meta.artifact_dir === "" ? null : meta.artifact_dir;
  return {
    name,
    path: filePath,
    format: "md",
    source,
    meta: {
      displayName: meta.name || firstHeading(content) || name,
      description: meta.description,
      mode,
      phases: phases.length > 0 ? phases : undefined,
      triggers,
      complexity: meta.complexity,
      // Fix: previously `artifactDirValue ?? undefined`, which collapsed the
      // explicit null computed above into undefined and made "no artifact
      // dir" indistinguishable from "unspecified". WorkflowPluginMeta
      // declares `artifactDir?: string | null` precisely for this distinction.
      artifactDir: artifactDirValue,
      requiresProject: meta.requires_project === "true",
    },
  };
}
/**
 * Load a YAML workflow plugin from disk.
 *
 * Returns null for unreadable files (discovery skips them). Parse failures
 * and non-object documents still yield a plugin — with `error` populated —
 * so discovery can surface the problem instead of hiding the file.
 * A missing or invalid `mode` defaults to "yaml-step".
 */
function loadYamlPlugin(filePath: string, source: PluginSource): WorkflowPlugin | null {
  const name = basenameNoExt(filePath);
  // Shared shell for "file exists but its definition is unusable".
  const invalid = (error: string): WorkflowPlugin => ({
    name,
    path: filePath,
    format: "yaml",
    source,
    meta: { displayName: name, mode: "yaml-step" },
    error,
  });
  let raw: string;
  try {
    raw = readFileSync(filePath, "utf-8");
  } catch {
    return null;
  }
  let parsed: unknown;
  try {
    parsed = parseYaml(raw);
  } catch (err) {
    return invalid(`YAML parse error: ${err instanceof Error ? err.message : String(err)}`);
  }
  if (parsed == null || typeof parsed !== "object") {
    return invalid("Definition is not an object");
  }
  const def = parsed as Record<string, unknown>;
  const mode: WorkflowMode = isValidMode(def.mode) ? def.mode : "yaml-step";
  // Step ids double as phase names for display purposes.
  const stepList = Array.isArray(def.steps) ? (def.steps as Array<Record<string, unknown>>) : [];
  const stepIds = stepList.map((s) => String(s.id ?? "")).filter(Boolean);
  const displayName = typeof def.name === "string" && def.name.trim() ? def.name : name;
  return {
    name,
    path: filePath,
    format: "yaml",
    source,
    meta: {
      displayName,
      description: typeof def.description === "string" ? def.description : undefined,
      mode,
      phases: stepIds.length > 0 ? stepIds : undefined,
    },
  };
}
/** File basename with its (final) extension stripped, e.g. "a/b.yaml" → "b". */
function basenameNoExt(filePath: string): string {
  const base = basename(filePath);
  const ext = extname(base);
  return ext ? base.slice(0, -ext.length) : base;
}
// ─── Directory walkers ───────────────────────────────────────────────────
// File extensions that discovery treats as workflow plugin definitions.
const PLUGIN_EXTENSIONS = new Set([".yaml", ".yml", ".md"]);
/**
 * Scan one directory (non-recursive) for plugin files and add every loadable
 * plugin to `out`, keyed by plugin name. Because later calls overwrite
 * earlier entries, callers control precedence by scan order. Unreadable
 * directories/files are skipped silently.
 */
function walkPluginDir(dir: string, source: PluginSource, out: Map<string, WorkflowPlugin>): void {
  if (!existsSync(dir)) return;
  let fileNames: string[];
  try {
    fileNames = readdirSync(dir);
  } catch {
    return;
  }
  for (const fileName of fileNames) {
    const fullPath = join(dir, fileName);
    let stats: ReturnType<typeof statSync>;
    try {
      stats = statSync(fullPath);
    } catch {
      continue;
    }
    if (!stats.isFile()) continue;
    const extension = extname(fileName).toLowerCase();
    if (!PLUGIN_EXTENSIONS.has(extension)) continue;
    const loaded =
      extension === ".md"
        ? loadMarkdownPlugin(fullPath, source)
        : loadYamlPlugin(fullPath, source);
    if (loaded) out.set(loaded.name, loaded);
  }
}
function loadBundledPlugins(out: Map<string, WorkflowPlugin>): void {
const bundledDir = resolveBundledDir();
if (!existsSync(bundledDir)) return;
const registry = loadRegistry();
for (const [id, entry] of Object.entries(registry.templates)) {
const filePath = join(bundledDir, entry.file);
if (!existsSync(filePath)) continue;
const ext = extname(entry.file).toLowerCase();
const format: PluginFormat = ext === ".md" ? "md" : "yaml";
// TemplateEntry doesn't carry mode — default by format
const mode: WorkflowMode = format === "yaml" ? "yaml-step" : "markdown-phase";
out.set(id, {
name: id,
path: filePath,
format,
source: "bundled",
meta: {
displayName: entry.name,
description: entry.description,
mode,
phases: Array.isArray(entry.phases) && entry.phases.length > 0 ? entry.phases : undefined,
triggers: Array.isArray(entry.triggers) ? entry.triggers : undefined,
complexity: entry.estimated_complexity,
artifactDir: entry.artifact_dir,
requiresProject: entry.requires_project,
},
});
}
}
// ─── Public API ──────────────────────────────────────────────────────────
/**
* Discover all workflow plugins. Project overrides global overrides bundled.
*
* The legacy `.sf/workflow-defs/*.yaml` directory is also scanned as a
* fallback YAML source so existing user definitions keep working.
*/
export function discoverPlugins(basePath: string): Map<string, WorkflowPlugin> {
const out = new Map<string, WorkflowPlugin>();
loadBundledPlugins(out);
walkPluginDir(globalPluginsDir(), "global", out);
walkPluginDir(legacyDefsDir(basePath), "project", out);
walkPluginDir(projectPluginsDir(basePath), "project", out);
return out;
}
/**
 * Resolve a plugin by name using the precedence chain.
 * Returns null if no plugin by that name exists anywhere.
 */
export function resolvePlugin(basePath: string, name: string): WorkflowPlugin | null {
  const match = discoverPlugins(basePath).get(name);
  return match === undefined ? null : match;
}
/**
 * Format all discovered plugins for display, grouped by execution mode
 * (markdown-phase, yaml-step, oneshot, auto-milestone) and sorted by name
 * within each group. Ends with a short usage footer.
 */
export function listPluginsFormatted(basePath: string): string {
  const plugins = discoverPlugins(basePath);
  if (plugins.size === 0) {
    return "No workflow plugins found.\n\nRun /sf workflow new to author one.";
  }
  const order: WorkflowMode[] = ["markdown-phase", "yaml-step", "oneshot", "auto-milestone"];
  const byMode = new Map<WorkflowMode, WorkflowPlugin[]>(order.map((m) => [m, []]));
  for (const plugin of plugins.values()) {
    byMode.get(plugin.meta.mode)?.push(plugin);
  }
  const lines: string[] = ["Workflow Plugins\n"];
  for (const mode of order) {
    const group = [...(byMode.get(mode) ?? [])].sort((a, b) => a.name.localeCompare(b.name));
    if (group.length === 0) continue;
    lines.push(` [${mode}]`);
    for (const plugin of group) {
      const originTag = `${plugin.source}/${plugin.format}`;
      const description = plugin.meta.description ?? "";
      lines.push(` ${plugin.name.padEnd(22)} ${originTag.padEnd(16)}${description}`);
    }
    lines.push("");
  }
  lines.push("Usage:");
  lines.push(" /sf workflow <name> Run a plugin directly");
  lines.push(" /sf workflow info <name> Show plugin details");
  lines.push(" /sf workflow install <src> Install a plugin from a URL");
  return lines.join("\n");
}
/**
 * Format a single plugin's metadata for `/sf workflow info <name>`:
 * a fixed header (name, source, format, mode, path) followed by optional
 * sections for description, complexity, phases, triggers, artifact dir,
 * and any validation warning.
 */
export function formatPluginInfo(plugin: WorkflowPlugin): string {
  const out: string[] = [
    `Plugin: ${plugin.meta.displayName} (${plugin.name})`,
    "",
    `Source: ${plugin.source}`,
    `Format: ${plugin.format}`,
    `Mode: ${plugin.meta.mode}`,
    `Path: ${plugin.path}`,
  ];
  const { description, complexity, phases, triggers, artifactDir } = plugin.meta;
  if (description) {
    out.push(`About: ${description}`);
  }
  if (complexity) {
    out.push(`Complexity: ${complexity}`);
  }
  if (phases && phases.length > 0) {
    out.push("", "Phases/Steps:");
    for (const [index, phase] of phases.entries()) {
      out.push(` ${index + 1}. ${phase}`);
    }
  }
  if (triggers && triggers.length > 0) {
    out.push("", `Triggers: ${triggers.join(", ")}`);
  }
  if (artifactDir) {
    out.push("", `Artifacts: ${artifactDir}`);
  }
  if (plugin.error) {
    out.push("", `Warning: ${plugin.error}`);
  }
  return out.join("\n");
}
/**
 * Get the plugin directory paths for the project/global/bundled tiers.
 * Exposed for the install command and tests.
 */
export function getPluginDirs(basePath: string): { project: string; global: string; bundled: string; legacy: string } {
  const project = projectPluginsDir(basePath);
  const legacy = legacyDefsDir(basePath);
  return {
    project,
    global: globalPluginsDir(),
    bundled: resolveBundledDir(),
    legacy,
  };
}

View file

@ -18,6 +18,9 @@ const registryPath = join(
);
/** Resolve the SF extension dir with fallback to ~/.sf/agent/extensions/sf/. */
/**
* Resolve the SF extension directory with fallback to ~/.sf/agent/extensions/sf/.
*/
function resolveSfExtensionDir(): string {
const moduleDir = dirname(fileURLToPath(import.meta.url));
if (existsSync(join(moduleDir, "workflow-templates"))) return moduleDir;
@ -101,6 +104,10 @@ let cachedRegistry: TemplateRegistry | null = null;
/**
* Load and cache the workflow template registry.
*/
/**
* Load and cache the workflow template registry.
* Returns empty registry if file doesn't exist.
*/
export function loadRegistry(): TemplateRegistry {
if (cachedRegistry) return cachedRegistry;

View file

@ -776,37 +776,34 @@ export function removeWorktree(
/**
* Paths to skip in all worktree diffs (internal/runtime artifacts).
*
* NOTE: These arrays must stay synchronized with SF_RUNTIME_PATTERNS in gitignore.ts.
* That file is the canonical source of truth for runtime ignore patterns.
* This module uses a split representation (paths/exact/prefixes) for efficient matching.
* Derived from SF_RUNTIME_PATTERNS (canonical source in gitignore.ts).
* Split into three arrays for efficient matching:
* - SKIP_PATHS: directory patterns (ending in /)
* - SKIP_EXACT: exact filenames (no glob or / characters)
* - SKIP_PREFIXES: patterns with * (use prefix before *)
*/
const SKIP_PATHS = [
".sf/worktrees/",
".sf/runtime/",
".sf/activity/",
".sf/audit/",
".sf/exec/",
".sf/forensics/",
".sf/model-benchmarks/",
".sf/parallel/",
".sf/reports/",
".sf/journal/",
];
const SKIP_EXACT = [
".sf/STATE.md",
".sf/auto.lock",
".sf/metrics.json",
".sf/state-manifest.json",
".sf/doctor-history.jsonl",
".sf/event-log.jsonl",
".sf/notifications.jsonl",
".sf/routing-history.json",
".sf/self-feedback.jsonl",
".sf/repo-meta.json",
".sf/DISCUSSION-MANIFEST.json",
];
/** File prefixes to skip (for wildcard patterns like completed-units*.json, sf.db*). */
const SKIP_PREFIXES = [".sf/completed-units", ".sf/sf.db"];
/**
 * Split SF_RUNTIME_PATTERNS (canonical list in gitignore.ts) into the three
 * matcher arrays used by the worktree-diff skip logic:
 *   - paths:    patterns ending in "/" (directory prefixes)
 *   - exact:    bare names containing neither "*" nor "/"
 *   - prefixes: the literal text before the first "*" of wildcard patterns
 * NOTE(review): a pattern that contains "/" but no "*" and does not end in
 * "/" (e.g. "dir/file.ext") matches none of the three branches and is
 * silently dropped — confirm SF_RUNTIME_PATTERNS has no such entries.
 */
function derivePatternsFromRuntime() {
  const paths: string[] = [];
  const exact: string[] = [];
  const prefixes: string[] = [];
  for (const pattern of SF_RUNTIME_PATTERNS) {
    if (pattern.endsWith("/")) {
      paths.push(pattern);
    } else if (!pattern.includes("*") && !pattern.includes("/")) {
      exact.push(pattern);
    } else if (pattern.includes("*")) {
      // Keep only the literal prefix before the wildcard; dedupe.
      const prefix = pattern.slice(0, pattern.indexOf("*"));
      if (prefix && !prefixes.includes(prefix)) {
        prefixes.push(prefix);
      }
    }
  }
  return { paths, exact, prefixes };
}
// Derived once at module load; SKIP_* names preserved for existing matchers.
const { paths: SKIP_PATHS, exact: SKIP_EXACT, prefixes: SKIP_PREFIXES } = derivePatternsFromRuntime();
function shouldSkipPath(filePath: string): boolean {
if (SKIP_PATHS.some((p) => filePath.startsWith(p))) return true;