#!/usr/bin/env node
|
|
import {
|
|
cpSync,
|
|
existsSync,
|
|
mkdirSync,
|
|
readFileSync,
|
|
symlinkSync,
|
|
} from "node:fs";
|
|
import { homedir } from "node:os";
|
|
import { delimiter, join, relative, resolve } from "node:path";
|
|
|
|
// SF Startup Loader
|
|
// Copyright (c) 2026 Singularity Forge
|
|
|
|
// Drop the node:sqlite ExperimentalWarning at the source so it never reaches
|
|
// stderr. Other ExperimentalWarnings (and all other warning classes) still
|
|
// propagate normally. node:sqlite has been experimental since v22 and the
|
|
// warning pollutes JSON output of `sf headless query` and similar commands.
|
|
{
|
|
const originalEmitWarning = process.emitWarning.bind(process);
|
|
type EmitArgs = Parameters<typeof process.emitWarning>;
|
|
process.emitWarning = ((...args: EmitArgs) => {
|
|
const [warning, typeOrOpts] = args;
|
|
const message = typeof warning === "string" ? warning : warning?.message;
|
|
const name =
|
|
typeof warning === "object" && warning !== null
|
|
? warning.name
|
|
: typeof typeOrOpts === "string"
|
|
? typeOrOpts
|
|
: typeof typeOrOpts === "object" && typeOrOpts !== null
|
|
? (typeOrOpts as { type?: string }).type
|
|
: undefined;
|
|
if (
|
|
name === "ExperimentalWarning" &&
|
|
typeof message === "string" &&
|
|
message.includes("SQLite is an experimental feature")
|
|
) {
|
|
return;
|
|
}
|
|
return originalEmitWarning(...args);
|
|
}) as typeof process.emitWarning;
|
|
}
|
|
|
|
// Fast-path: handle --version/-v and --help/-h before importing any heavy
|
|
// dependencies. This avoids loading the entire coding-agent barrel import
|
|
// (~1s) just to print a version string.
|
|
const sfRootDir = resolve(import.meta.dirname, "..");
|
|
const args = process.argv.slice(2);
|
|
const firstArg = args[0];
|
|
|
|
// Read package.json once — reused for version, banner, and SF_VERSION below
|
|
let sfVersion = "0.0.0";
|
|
try {
|
|
const pkg = JSON.parse(
|
|
readFileSync(join(sfRootDir, "package.json"), "utf-8"),
|
|
);
|
|
sfVersion = pkg.version || "0.0.0";
|
|
} catch {
|
|
/* ignore */
|
|
}
|
|
|
|
if (firstArg === "--version" || firstArg === "-v") {
|
|
process.stdout.write(sfVersion + "\n");
|
|
process.exit(0);
|
|
}
|
|
|
|
if (firstArg === "--help" || firstArg === "-h") {
|
|
const { printHelp } = await import("./help-text.js");
|
|
printHelp(sfVersion);
|
|
process.exit(0);
|
|
}
|
|
|
|
if (
|
|
firstArg &&
|
|
firstArg !== "--" &&
|
|
args.slice(1).some((arg) => arg === "--help" || arg === "-h")
|
|
) {
|
|
const { printHelp, printSubcommandHelp } = await import("./help-text.js");
|
|
if (!printSubcommandHelp(firstArg, sfVersion)) {
|
|
printHelp(sfVersion);
|
|
}
|
|
process.exit(0);
|
|
}
|
|
|
|
// Fast-path invalid headless flags before importing cli.ts. Project-state
|
|
// validation belongs to headless.ts because bare `sf headless` is help, `init`
|
|
// is allowed before .sf exists, and explicit commands decide their own state
|
|
// requirements.
|
|
if (firstArg === "headless") {
|
|
for (let i = 1; i < args.length; i += 1) {
|
|
const arg = args[i];
|
|
if (arg === "--timeout" && i + 1 < args.length) {
|
|
const timeout = parseInt(args[++i], 10);
|
|
if (Number.isNaN(timeout) || timeout < 0) {
|
|
process.stderr.write(
|
|
"[headless] Error: --timeout must be a non-negative integer (milliseconds, 0 to disable)\n",
|
|
);
|
|
process.exit(1);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Schedule due-items banner — a lightweight best-effort check that runs before
// any heavy import. Skipped in quiet mode, for the version/help fast paths,
// and for `sf schedule` itself (which renders its own listing). Any failure
// here (unreadable file, bad JSON at the file level, etc.) is swallowed: the
// banner is informational only and must never block startup.
if (
  !process.env.SF_QUIET &&
  firstArg !== "--version" &&
  firstArg !== "-v" &&
  firstArg !== "--help" &&
  firstArg !== "-h" &&
  firstArg !== "schedule"
) {
  try {
    const now = Date.now();
    // Items merely announced to the user vs. items the autonomous dispatcher
    // will consume on its own — reported with different messages below.
    let passiveDueCount = 0;
    let projectAutonomousDispatchDueCount = 0;
    // Candidate schedule files: two project-scoped locations plus the global
    // one under the user's home directory. Missing files are skipped.
    const schedulePaths = [
      { path: join(process.cwd(), ".sf", "schedule.jsonl"), scope: "project" },
      {
        path: join(process.cwd(), ".sf", "runtime", "schedule.jsonl"),
        scope: "project",
      },
      { path: join(homedir(), ".sf", "schedule.jsonl"), scope: "global" },
    ];
    for (const { path: schedulePath, scope } of schedulePaths) {
      if (!existsSync(schedulePath)) continue;
      const content = readFileSync(schedulePath, "utf-8");
      // JSONL is append-only: later rows supersede earlier ones for the same
      // id, so reduce each file to the latest entry per id before counting.
      // NOTE(review): the map is per-file — the same id present in two files
      // is counted once per file. Confirm that double-counting is intended.
      const latestById = new Map<string, Record<string, unknown>>();
      for (const line of content.split("\n")) {
        if (!line.trim()) continue;
        try {
          const entry = JSON.parse(line);
          // Rows without a string id are not addressable — ignore them.
          if (!entry?.id || typeof entry.id !== "string") continue;
          const existing = latestById.get(entry.id);
          // "Latest" is decided by lexicographic comparison of created_at;
          // assumes created_at is an ISO-8601 string (sorts chronologically)
          // — TODO confirm against the writer.
          if (
            !existing ||
            String(entry.created_at ?? "") > String(existing.created_at ?? "")
          ) {
            latestById.set(entry.id, entry);
          }
        } catch {
          // skip corrupt lines
        }
      }
      for (const entry of latestById.values()) {
        // Due = still pending and due_at is at or before now. A missing or
        // unparsable due_at yields NaN, which fails the comparison → skipped.
        if (
          entry.status === "pending" &&
          Date.parse(String(entry.due_at)) <= now
        ) {
          // Autonomous dispatch applies only to project-scoped command/prompt
          // entries that opted in; everything else is a passive reminder.
          if (
            scope === "project" &&
            entry.autonomous_dispatch === true &&
            (entry.kind === "command" || entry.kind === "prompt")
          ) {
            projectAutonomousDispatchDueCount++;
          } else {
            passiveDueCount++;
          }
        }
      }
    }
    if (passiveDueCount > 0) {
      process.stderr.write(
        `[forge] ${passiveDueCount} passive scheduled item${passiveDueCount === 1 ? "" : "s"} due now. Manage: /schedule list or sf schedule list\n`,
      );
    }
    if (projectAutonomousDispatchDueCount > 0) {
      process.stderr.write(
        `[forge] ${projectAutonomousDispatchDueCount} scheduled autonomous dispatch item${projectAutonomousDispatchDueCount === 1 ? "" : "s"} due now; autonomous mode will consume project entries.\n`,
      );
    }
  } catch {
    // non-fatal
  }
}
|
|
|
|
// ---------------------------------------------------------------------------
// Runtime dependency checks — fail fast with clear diagnostics before any
// heavy imports. Enforces a hardcoded minimum Node major version (keep
// MIN_NODE_MAJOR in sync with the engines field in package.json) and
// verifies git is available on PATH.
// ---------------------------------------------------------------------------
|
|
{
|
|
const MIN_NODE_MAJOR = 26;
|
|
const red = "\x1b[31m";
|
|
const bold = "\x1b[1m";
|
|
const dim = "\x1b[2m";
|
|
const reset = "\x1b[0m";
|
|
|
|
// -- Node version --
|
|
const nodeMajor = parseInt(process.versions.node.split(".")[0], 10);
|
|
if (nodeMajor < MIN_NODE_MAJOR) {
|
|
process.stderr.write(
|
|
`\n${red}${bold}Error:${reset} SF requires Node.js >= ${MIN_NODE_MAJOR}.0.0\n` +
|
|
` You are running Node.js ${process.versions.node}\n\n` +
|
|
`${dim}Install a supported version:${reset}\n` +
|
|
` nvm install ${MIN_NODE_MAJOR} ${dim}# if using nvm${reset}\n` +
|
|
` fnm install ${MIN_NODE_MAJOR} ${dim}# if using fnm${reset}\n` +
|
|
` brew install node@${MIN_NODE_MAJOR} ${dim}# macOS Homebrew${reset}\n\n`,
|
|
);
|
|
process.exit(1);
|
|
}
|
|
|
|
// -- git --
|
|
try {
|
|
const { execFileSync } = await import("node:child_process");
|
|
execFileSync("git", ["--version"], { stdio: "ignore" });
|
|
} catch {
|
|
process.stderr.write(
|
|
`\n${red}${bold}Error:${reset} SF requires git but it was not found on PATH.\n\n` +
|
|
`${dim}Install git:${reset}\n` +
|
|
` https://git-scm.com/downloads\n\n`,
|
|
);
|
|
process.exit(1);
|
|
}
|
|
}
|
|
|
|
import { agentDir, appRoot } from "./app-paths.js";
|
|
import { serializeBundledExtensionPaths } from "./bundled-extension-paths.js";
|
|
import { discoverExtensionEntryPaths } from "./extension-discovery.js";
|
|
import {
|
|
isExtensionEnabled,
|
|
loadRegistry,
|
|
readManifestFromEntryPath,
|
|
} from "./extension-registry.js";
|
|
import { renderLogo } from "./logo.js";
|
|
import { applyRtkProcessEnv } from "./rtk.js";
|
|
|
|
// pkg/ is a shim directory: contains sf's piConfig (package.json) and pi's
|
|
// theme assets (dist/modes/interactive/theme/) without a src/ directory.
|
|
// This allows config.js to:
|
|
// 1. Read piConfig.name → "sf" (branding)
|
|
// 2. Resolve themes via dist/ (no src/ present → uses dist path)
|
|
const pkgDir = resolve(import.meta.dirname, "..", "pkg");
|
|
|
|
// MUST be set before any dynamic import of pi SDK fires — this is what config.js
|
|
// reads to determine APP_NAME and CONFIG_DIR_NAME
|
|
process.env.PI_PACKAGE_DIR = pkgDir;
|
|
process.env.PI_SKIP_VERSION_CHECK = "1"; // SF runs its own update check in cli.ts — suppress pi's
|
|
process.title = "sf";
|
|
|
|
// Print branded banner on first launch (before ~/.sf/ exists).
|
|
// Set SF_FIRST_RUN_BANNER so cli.ts skips the duplicate welcome screen.
|
|
if (!existsSync(appRoot)) {
|
|
const cyan = "\x1b[36m";
|
|
const green = "\x1b[32m";
|
|
const dim = "\x1b[2m";
|
|
const reset = "\x1b[0m";
|
|
const colorCyan = (s: string) => `${cyan}${s}${reset}`;
|
|
process.stderr.write(
|
|
renderLogo(colorCyan) +
|
|
"\n" +
|
|
` Singularity Forge ${dim}v${sfVersion}${reset}\n` +
|
|
` ${green}Welcome.${reset} Setting up your environment...\n\n`,
|
|
);
|
|
process.env.SF_FIRST_RUN_BANNER = "1";
|
|
}
|
|
|
|
// SF_CODING_AGENT_DIR — tells pi's getAgentDir() to return ~/.sf/agent/ instead of ~/.sf/agent/
|
|
process.env.SF_CODING_AGENT_DIR = agentDir;
|
|
|
|
// SF_PKG_ROOT — absolute path to sf-run package root. Used by deployed extensions
|
|
// (e.g. auto.ts resume path) to import modules like resource-loader.js that live
|
|
// in the package tree, not in the deployed ~/.sf/agent/ tree.
|
|
process.env.SF_PKG_ROOT = sfRootDir;
|
|
|
|
// RTK environment — make ~/.sf/agent/bin visible to all child-process paths,
|
|
// not just the bash tool, and force-disable RTK telemetry for SF-managed use.
|
|
applyRtkProcessEnv(process.env);
|
|
|
|
// NODE_PATH — make sf's own node_modules available to extensions loaded via jiti.
|
|
// Without this, extensions (e.g. browser-tools) can't resolve dependencies like
|
|
// `playwright` because jiti resolves modules from coding-agent's location, not sf's.
|
|
// Prepending sf's node_modules to NODE_PATH fixes this for all extensions.
|
|
const sfNodeModules = join(sfRootDir, "node_modules");
|
|
process.env.NODE_PATH = [sfNodeModules, process.env.NODE_PATH]
|
|
.filter(Boolean)
|
|
.join(delimiter);
|
|
// Force Node to re-evaluate module search paths with the updated NODE_PATH.
|
|
// Must happen synchronously before cli.js imports → extension loading.
|
|
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
|
const { Module } = await import("node:module");
|
|
(Module as any)._initPaths?.();
|
|
|
|
// SF_VERSION — expose package version so extensions can display it
|
|
process.env.SF_VERSION = sfVersion;
|
|
|
|
// SF_BIN_PATH — absolute path to this loader (dist/loader.js), used by patched subagent
|
|
// to spawn sf instead of pi when dispatching workflow tasks.
|
|
// Respect a pre-set value so a source-mode wrapper (e.g. bin/sf-from-source) can
|
|
// advertise the executable shim instead of the .ts loader path (which spawn() can't exec).
|
|
process.env.SF_BIN_PATH = process.env.SF_BIN_PATH || process.argv[1];
|
|
|
|
// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched sf extension
|
|
// when dispatching workflow prompts. Prefers dist/resources/ (stable, set at build time)
|
|
// over src/resources/ (live working tree) — see resource-loader.ts for rationale.
|
|
// Guard: the build sometimes copies dist/resources/extensions/ without the root-level
|
|
// markdown files, leaving existsSync(distRes)=true but SF-WORKFLOW.md missing. Fall
|
|
// back to src/resources/ when the file itself isn't present in dist.
|
|
const distRes = join(sfRootDir, "dist", "resources");
|
|
const srcRes = join(sfRootDir, "src", "resources");
|
|
const resourcesDir = existsSync(join(distRes, "SF-WORKFLOW.md"))
|
|
? distRes
|
|
: srcRes;
|
|
process.env.SF_WORKFLOW_PATH = join(resourcesDir, "SF-WORKFLOW.md");
|
|
|
|
// SF_BUNDLED_EXTENSION_PATHS — dynamically discovered bundled extension entry points.
|
|
// Uses the shared discoverExtensionEntryPaths() to scan the bundled resources
|
|
// directory, then remaps discovered paths to agentDir (~/.sf/agent/extensions/)
|
|
// where initResources() will sync them.
|
|
const bundledExtDir = join(resourcesDir, "extensions");
|
|
const agentExtDir = join(agentDir, "extensions");
|
|
const registry = loadRegistry();
|
|
const discoveredExtensionPaths = discoverExtensionEntryPaths(bundledExtDir)
|
|
.map((entryPath) => join(agentExtDir, relative(bundledExtDir, entryPath)))
|
|
.filter((entryPath) => {
|
|
const manifest = readManifestFromEntryPath(entryPath);
|
|
if (!manifest) return true; // no manifest = always load
|
|
return isExtensionEnabled(registry, manifest.id);
|
|
});
|
|
|
|
process.env.SF_BUNDLED_EXTENSION_PATHS = serializeBundledExtensionPaths(
|
|
discoveredExtensionPaths,
|
|
);
|
|
|
|
// Respect HTTP_PROXY / HTTPS_PROXY / NO_PROXY env vars for all outbound requests.
|
|
// coding-agent's cli.ts sets this, but SF bypasses that entry point — so we
|
|
// must set it here before any SDK clients are created.
|
|
// Lazy-load undici (~200ms) only when proxy env vars are actually set.
|
|
if (
|
|
process.env.HTTP_PROXY ||
|
|
process.env.HTTPS_PROXY ||
|
|
process.env.http_proxy ||
|
|
process.env.https_proxy
|
|
) {
|
|
const { EnvHttpProxyAgent, setGlobalDispatcher } = await import("undici");
|
|
setGlobalDispatcher(new EnvHttpProxyAgent());
|
|
}
|
|
|
|
// Ensure workspace packages are linked (or copied on Windows) before importing
|
|
// cli.js (which imports @singularity-forge/*).
|
|
// npm postinstall handles this normally, but npx --ignore-scripts skips postinstall.
|
|
// On Windows without Developer Mode or admin rights, symlinkSync will throw even for
|
|
// 'junction' type — so we fall back to cpSync (a full directory copy) which works
|
|
// everywhere without elevated permissions.
|
|
const sfScopeDir = join(sfNodeModules, "@singularity-forge");
|
|
const packagesDir = join(sfRootDir, "packages");
|
|
const wsPackages = ["native", "agent-core", "ai", "coding-agent", "tui"];
|
|
try {
|
|
if (!existsSync(sfScopeDir)) mkdirSync(sfScopeDir, { recursive: true });
|
|
for (const pkg of wsPackages) {
|
|
const target = join(sfScopeDir, pkg);
|
|
const source = join(packagesDir, pkg);
|
|
if (!existsSync(source) || existsSync(target)) continue;
|
|
try {
|
|
symlinkSync(source, target, "junction");
|
|
} catch {
|
|
// Symlink failed (common on Windows without Developer Mode / admin).
|
|
// Fall back to a directory copy — slower on first run but universally works.
|
|
try {
|
|
cpSync(source, target, { recursive: true });
|
|
} catch {
|
|
/* non-fatal */
|
|
}
|
|
}
|
|
}
|
|
} catch {
|
|
/* non-fatal */
|
|
}
|
|
|
|
// Validate critical workspace packages are resolvable. If still missing after the
|
|
// symlink+copy attempts, emit a clear diagnostic instead of a cryptic
|
|
// ERR_MODULE_NOT_FOUND from deep inside cli.js.
|
|
const criticalPackages = ["coding-agent"];
|
|
const missingPackages = criticalPackages.filter(
|
|
(pkg) => !existsSync(join(sfScopeDir, pkg)),
|
|
);
|
|
if (missingPackages.length > 0) {
|
|
const missing = missingPackages
|
|
.map((p) => `@singularity-forge/${p}`)
|
|
.join(", ");
|
|
process.stderr.write(
|
|
`\nError: SF installation is broken — missing packages: ${missing}\n\n` +
|
|
`This is usually caused by one of:\n` +
|
|
` • An outdated version installed from npm (run: npm install -g singularity-forge@latest)\n` +
|
|
` • The packages/ directory was excluded from the installed tarball\n` +
|
|
` • A filesystem error prevented linking or copying the workspace packages\n\n` +
|
|
`Fix it by reinstalling:\n\n` +
|
|
` npm install -g singularity-forge@latest\n\n` +
|
|
`If the issue persists, please open an issue at:\n` +
|
|
` https://github.com/singularity-ng/singularity-forge/issues\n`,
|
|
);
|
|
process.exit(1);
|
|
}
|
|
|
|
// Dynamic import defers ESM evaluation — config.js will see PI_PACKAGE_DIR above
|
|
await import("./cli.js");
|