refactor(extensions): consolidate duplicate code into canonical modules
- Delete ghost package packages/pi-agent-core (no dist, no consumers, TS build errors; JS source sf-db.js had 3 commits not mirrored in TS)
- Remove build:pi-agent-core from the root package.json build:pi pipeline
- Merge all models from MODEL_COST_PER_1K_INPUT into BUNDLED_COST_TABLE (model-cost-table.js is now the single canonical cost source)
- Remove the duplicate MODEL_COST_PER_1K_INPUT object and getModelCost() from model-router.js; use lookupModelCost() from model-cost-table.js
- Replace the hand-rolled isTransientNetworkError in preferences-models.js with delegation to classifyError() in error-classifier.js

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
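For reviewers, a minimal sketch of the two delegation changes described above. The module names come from the commit message; the relative import paths and the exact return shapes of `lookupModelCost()` and `classifyError()` are assumptions for illustration only.

```typescript
import { lookupModelCost } from "./model-cost-table.js"; // canonical cost table (path assumed)
import { classifyError } from "./error-classifier.js";   // canonical classifier (path assumed)

// model-router.js after the change: no local MODEL_COST_PER_1K_INPUT, no getModelCost().
function estimateInputCost(model: string, inputTokens: number): number {
  // Assumption: lookupModelCost returns per-1K-token pricing, or undefined for unknown models.
  const cost = lookupModelCost(model);
  return cost ? (inputTokens / 1000) * cost.inputPer1K : 0;
}

// preferences-models.js after the change: no hand-rolled isTransientNetworkError().
function shouldRetry(err: unknown): boolean {
  // Assumption: classifyError returns a category string such as "transient" or "permanent".
  return classifyError(err) === "transient";
}
```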
This commit is contained in:
parent 5ea96143ca
commit 64ddbd950f

27 changed files with 859 additions and 10350 deletions
BIN  .sf/backups/db/sf.db.2026-05-11T02-27-37-024Z (new file; binary not shown)
BIN  .sf/metrics.db (binary not shown)
@@ -399,7 +399,6 @@ Generated from `src/resources/extensions/*/extension-manifest.json`.
 - `slash-commands` — [extension-manifest.json](src/resources/extensions/slash-commands/extension-manifest.json)
 - `ttsr` — [extension-manifest.json](src/resources/extensions/ttsr/extension-manifest.json)
 - `universal-config` — [extension-manifest.json](src/resources/extensions/universal-config/extension-manifest.json)
-- `vectordrive` — [extension-manifest.json](src/resources/extensions/vectordrive/extension-manifest.json)
 - `voice` — [extension-manifest.json](src/resources/extensions/voice/extension-manifest.json)

 ### Search Providers
@@ -44,12 +44,11 @@
   "scripts": {
     "build:pi-tui": "npm --workspace @singularity-forge/tui run build",
     "build:pi-ai": "npm --workspace @singularity-forge/ai run build",
-    "build:pi-agent-core": "npm --workspace @singularity-forge/agent-core run build",
     "build:pi-coding-agent": "npm --workspace @singularity-forge/coding-agent run build",
     "build:native-pkg": "npm --workspace @singularity-forge/native run build",
     "build:rpc-client": "npm --workspace @singularity-forge/rpc-client run build",
     "build:google-gemini-cli-provider": "npm --workspace @singularity-forge/google-gemini-cli-provider run build",
-    "build:pi": "npm run build:native-pkg && npm run build:pi-tui && npm run build:google-gemini-cli-provider && npm run build:pi-ai && npm run build:pi-agent-core && npm run build:pi-coding-agent",
+    "build:pi": "npm run build:native-pkg && npm run build:pi-tui && npm run build:google-gemini-cli-provider && npm run build:pi-ai && npm run build:pi-coding-agent",
     "build:daemon": "npm --workspace @singularity-forge/daemon run build",
     "build:core": "npm run build:pi && npm run build:rpc-client && npm run build:daemon && npm run check:versioned-json && tsgo && npm run copy-resources && npm run copy-themes && npm run copy-export-html",
     "build": "npm run build:core && node scripts/build-web-if-stale.cjs",
@@ -1,25 +0,0 @@
{
  "name": "@singularity-forge/pi-agent-core",
  "version": "2.75.3",
  "description": "SF database abstraction layer and agent-core primitives (TypeScript)",
  "type": "module",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    },
    "./db/sf-db": {
      "types": "./dist/db/sf-db.d.ts",
      "import": "./dist/db/sf-db.js"
    }
  },
  "scripts": {
    "build": "tsc -p tsconfig.json"
  },
  "dependencies": {},
  "engines": {
    "node": ">=26.1.0"
  }
}
@@ -1,26 +0,0 @@
/**
 * SF Error Types — Typed error hierarchy for diagnostics and crash recovery.
 *
 * All SF-specific errors extend SFError, which carries a stable `code`
 * string suitable for programmatic matching. Error codes are defined as
 * constants so callers can switch on them without string-matching.
 */

// ─── Error Codes ──────────────────────────────────────────────────────────────
export const SF_STALE_STATE = "SF_STALE_STATE";
export const SF_LOCK_HELD = "SF_LOCK_HELD";
export const SF_ARTIFACT_MISSING = "SF_ARTIFACT_MISSING";
export const SF_GIT_ERROR = "SF_GIT_ERROR";
export const SF_MERGE_CONFLICT = "SF_MERGE_CONFLICT";
export const SF_PARSE_ERROR = "SF_PARSE_ERROR";
export const SF_IO_ERROR = "SF_IO_ERROR";

// ─── Base Error ───────────────────────────────────────────────────────────────
export class SFError extends Error {
  code: string;
  constructor(code: string, message: string, options?: ErrorOptions) {
    super(message, options);
    this.name = "SFError";
    this.code = code;
  }
}
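The removed error module keeps its consumer contract simple: catch, check `instanceof SFError`, and branch on the stable code constant rather than the message string. A minimal caller-side sketch (`acquireLock()` is a hypothetical caller, not part of the removed file):

```typescript
import { SFError, SF_LOCK_HELD } from "./errors.js";

declare function acquireLock(): void; // hypothetical consumer of the error hierarchy

try {
  acquireLock();
} catch (err) {
  // Match on the stable code, not on the human-readable message.
  if (err instanceof SFError && err.code === SF_LOCK_HELD) {
    // back off and retry, or surface a friendlier message to the user
  } else {
    throw err;
  }
}
```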
@@ -1,231 +0,0 @@
/**
 * SF Gate Registry — single source of truth for quality-gate ownership.
 *
 * Each gate declares which workflow turn owns it, the scope at which it is
 * persisted in the `quality_gates` table, and the question/guidance text used
 * in the prompt that turn sends.
 */
import { SF_PARSE_ERROR, SFError } from "./errors.js";

export type GateId =
  | "Q3"
  | "Q4"
  | "Q5"
  | "Q6"
  | "Q7"
  | "Q8"
  | "MV01"
  | "MV02"
  | "MV03"
  | "MV04";

export interface GateDefinition {
  id: GateId;
  scope: "slice" | "task" | "milestone";
  ownerTurn: string;
  question: string;
  guidance: string;
  promptSection: string;
  minOmissionWords: number;
}

export const GATE_REGISTRY: Record<GateId, GateDefinition> = {
  Q3: {
    id: "Q3",
    scope: "slice",
    ownerTurn: "gate-evaluate",
    question: "How can this be exploited?",
    guidance: [
      "Identify abuse scenarios: parameter tampering, replay attacks, privilege escalation.",
      "Map data exposure risks: PII, tokens, secrets accessible through this slice.",
      "Define input trust boundaries: untrusted user input reaching DB, API, or filesystem.",
      "If none apply, return verdict 'omitted' with rationale explaining why.",
    ].join("\n"),
    promptSection: "Abuse Surface",
    minOmissionWords: 20,
  },
  Q4: {
    id: "Q4",
    scope: "slice",
    ownerTurn: "gate-evaluate",
    question: "What existing promises does this break?",
    guidance: [
      "List which existing requirements (R001, R003, etc.) are touched by this slice.",
      "Identify what must be re-tested after shipping.",
      "Flag decisions that should be revisited given the new scope.",
      "If no existing requirements are affected, return verdict 'omitted'.",
    ].join("\n"),
    promptSection: "Broken Promises",
    minOmissionWords: 0,
  },
  Q5: {
    id: "Q5",
    scope: "task",
    ownerTurn: "execute-task",
    question: "What breaks when dependencies fail?",
    guidance: [
      "Enumerate the task's external dependencies (APIs, filesystem, network, subprocesses).",
      "Describe the failure path for each: timeout, malformed response, connection loss.",
      "Verify the implementation handles each failure or explicitly bubbles the error.",
      "Return verdict 'omitted' only if the task has no external dependencies.",
    ].join("\n"),
    promptSection: "Failure Modes",
    minOmissionWords: 15,
  },
  Q6: {
    id: "Q6",
    scope: "task",
    ownerTurn: "execute-task",
    question: "What is the 10x load breakpoint?",
    guidance: [
      "Identify the resource that saturates first at 10x the expected load.",
      "Describe the protection applied (pool sizing, rate limiting, pagination, caching).",
      "Return verdict 'omitted' if the task has no runtime load dimension.",
    ].join("\n"),
    promptSection: "Load Profile",
    minOmissionWords: 10,
  },
  Q7: {
    id: "Q7",
    scope: "task",
    ownerTurn: "execute-task",
    question: "What negative tests protect this task?",
    guidance: [
      "List malformed inputs, error paths, and boundary conditions the tests cover.",
      "Point to the specific test files or cases that assert each negative scenario.",
      "Return verdict 'omitted' only if the task has no meaningful negative surface.",
    ].join("\n"),
    promptSection: "Negative Tests",
    minOmissionWords: 15,
  },
  Q8: {
    id: "Q8",
    scope: "slice",
    ownerTurn: "complete-slice",
    question: "How will ops know this slice is healthy or broken?",
    guidance: [
      "Describe the health signal (metric, log line, dashboard) that proves the slice works.",
      "Describe the failure signal that triggers an alert or paging.",
      "Document the recovery procedure and any monitoring gaps.",
      "Return verdict 'omitted' only for slices with no runtime behavior at all.",
    ].join("\n"),
    promptSection: "Operational Readiness",
    minOmissionWords: 0,
  },
  MV01: {
    id: "MV01",
    scope: "milestone",
    ownerTurn: "validate-milestone",
    question: "Is every success criterion in the milestone roadmap satisfied?",
    guidance: [
      "Walk the success-criteria checklist from the milestone roadmap.",
      "For each criterion, point to the slice / assessment / verification evidence that proves it.",
      "Return verdict 'flag' if any criterion is unmet or unverifiable.",
    ].join("\n"),
    promptSection: "Success Criteria Checklist",
    minOmissionWords: 0,
  },
  MV02: {
    id: "MV02",
    scope: "milestone",
    ownerTurn: "validate-milestone",
    question: "Does every slice have a SUMMARY.md and a passing assessment?",
    guidance: [
      "Confirm every slice listed in the roadmap has a SUMMARY.md.",
      "Confirm each slice has an ASSESSMENT verdict of 'pass' (or justified 'omitted').",
      "Flag missing artifacts and slices with outstanding follow-ups or known limitations.",
    ].join("\n"),
    promptSection: "Slice Delivery Audit",
    minOmissionWords: 0,
  },
  MV03: {
    id: "MV03",
    scope: "milestone",
    ownerTurn: "validate-milestone",
    question: "Do the slices integrate end-to-end?",
    guidance: [
      "Trace at least one cross-slice flow proving the pieces compose.",
      "Flag gaps where two slices were built in isolation with no integration evidence.",
    ].join("\n"),
    promptSection: "Cross-Slice Integration",
    minOmissionWords: 0,
  },
  MV04: {
    id: "MV04",
    scope: "milestone",
    ownerTurn: "validate-milestone",
    question: "Are all touched requirements covered and still coherent?",
    guidance: [
      "For each requirement advanced, validated, surfaced, or invalidated across the milestone's slices, confirm the milestone-level evidence matches.",
      "Flag requirements that slices claim to advance but no artifact proves.",
    ].join("\n"),
    promptSection: "Requirement Coverage",
    minOmissionWords: 0,
  },
};

const ORDERED_GATES = Object.values(GATE_REGISTRY);

export function getGatesForTurn(turn: string): GateDefinition[] {
  return ORDERED_GATES.filter((g) => g.ownerTurn === turn);
}

export function getGateIdsForTurn(turn: string): Set<string> {
  return new Set(getGatesForTurn(turn).map((g) => g.id));
}

export function getGateDefinition(id: string): GateDefinition | undefined {
  return GATE_REGISTRY[id as GateId];
}

export function getOwnerTurn(id: string): string {
  const def = GATE_REGISTRY[id as GateId];
  if (!def) {
    throw new SFError(SF_PARSE_ERROR, `gate-registry: unknown gate id "${id}"`);
  }
  return def.ownerTurn;
}

export interface PendingGateRow {
  gate_id: string;
  [key: string]: unknown;
}

export function assertGateCoverage(
  pending: PendingGateRow[],
  turn: string,
  options: { requireAll?: boolean } = {},
): void {
  const requireAll = options.requireAll ?? true;
  const expected = getGateIdsForTurn(turn);
  const pendingIds = new Set(pending.map((g) => g.gate_id));
  const unknown: string[] = [];
  for (const id of pendingIds) {
    const def = getGateDefinition(id);
    if (!def) {
      unknown.push(id);
      continue;
    }
    if (def.ownerTurn !== turn) {
      unknown.push(`${id} (owned by ${def.ownerTurn}, not ${turn})`);
    }
  }
  if (unknown.length > 0) {
    throw new SFError(
      SF_PARSE_ERROR,
      `assertGateCoverage: turn "${turn}" received pending gates it does not own: ${unknown.join(", ")}`,
    );
  }
  if (requireAll) {
    const missing: string[] = [];
    for (const id of expected) {
      if (!pendingIds.has(id)) missing.push(id);
    }
    if (missing.length > 0) {
      throw new SFError(
        SF_PARSE_ERROR,
        `assertGateCoverage: turn "${turn}" is missing required gates: ${missing.join(", ")}`,
      );
    }
  }
}
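A minimal sketch of how a workflow turn consumes the removed gate registry: build prompt sections from the gates it owns, then verify the persisted rows cover exactly those gates. The `pendingRows` literal stands in for a hypothetical `quality_gates` query result.

```typescript
import { getGatesForTurn, assertGateCoverage } from "./gate-registry.js";

// Build the prompt sections for the gates owned by this turn (Q5, Q6, Q7 per the registry).
const gates = getGatesForTurn("execute-task");
const sections = gates.map(
  (g) => `## ${g.promptSection}\n${g.question}\n${g.guidance}`,
);

// Before persisting verdicts, check the pending rows cover exactly the owned gates.
const pendingRows = [{ gate_id: "Q5" }, { gate_id: "Q6" }, { gate_id: "Q7" }];
assertGateCoverage(pendingRows, "execute-task"); // throws SFError(SF_PARSE_ERROR) on mismatch
```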
@@ -1,5 +0,0 @@
export * from "./errors.js";
export * from "./gate-registry.js";
export * from "./sf-db.js";
export * from "./task-frontmatter.js";
export * from "./workflow-logger.js";
File diff suppressed because it is too large.
@@ -1,495 +0,0 @@
/**
 * Task Frontmatter - schema-backed task metadata
 *
 * Purpose: add structured fields to task records for risk assessment,
 * mutation scope declaration, verification requirements, plan approval, and
 * task lifecycle status while keeping scheduler status as a separate view field.
 *
 * Consumer: plan-v2 task creation, UOK gate runner, parallel orchestrator,
 * sf-db row mapping, and task state machine.
 */

export const RISK_LEVELS = [
  "none",
  "low",
  "medium",
  "high",
  "critical",
] as const;
export type RiskLevel = (typeof RISK_LEVELS)[number];

export const MUTATION_SCOPES = [
  "none",
  "docs-only",
  "config",
  "test-only",
  "isolated",
  "bounded",
  "cross-cutting",
  "systemic",
] as const;
export type MutationScope = (typeof MUTATION_SCOPES)[number];

export const VERIFICATION_TYPES = [
  "none",
  "self-check",
  "review",
  "test",
  "integration",
  "manual-qa",
] as const;
export type VerificationType = (typeof VERIFICATION_TYPES)[number];

export const PLAN_APPROVAL_STATES = [
  "not-required",
  "pending",
  "approved",
  "rejected",
  "auto-approved",
] as const;
export type PlanApprovalState = (typeof PLAN_APPROVAL_STATES)[number];

export const TASK_STATUSES = [
  "todo",
  "running",
  "verifying",
  "reviewing",
  "done",
  "blocked",
  "paused",
  "failed",
  "cancelled",
  "retrying",
] as const;
export type TaskStatus = (typeof TASK_STATUSES)[number];

export const SCHEDULER_STATUSES = [
  "queued",
  "due",
  "claimed",
  "dispatched",
  "consumed",
  "expired",
] as const;
export type SchedulerStatus = (typeof SCHEDULER_STATUSES)[number];

export interface TaskFrontmatter {
  risk: RiskLevel;
  mutationScope: MutationScope;
  verification: VerificationType;
  planApproval: PlanApprovalState;
  taskStatus: TaskStatus;
  schedulerStatus: SchedulerStatus;
  estimatedEffort: number | null;
  keyFiles: string[];
  dependencies: string[];
  blocksParallel: boolean;
  requiresUserInput: boolean;
  autoRetry: boolean;
  maxRetries: number;
}

const TASK_STATUS_ALIASES: Record<string, string> = {
  complete: "done",
  completed: "done",
  in_progress: "running",
  "manual-attention": "reviewing",
  manual_attention: "reviewing",
  pending: "todo",
  review: "reviewing",
};

const SCHEDULER_STATUS_ALIASES: Record<string, string> = {
  completed: "consumed",
  done: "consumed",
  pending: "queued",
};

export const DEFAULT_TASK_FRONTMATTER: TaskFrontmatter = {
  risk: "low",
  mutationScope: "isolated",
  verification: "self-check",
  planApproval: "not-required",
  taskStatus: "todo",
  schedulerStatus: "queued",
  estimatedEffort: null,
  keyFiles: [],
  dependencies: [],
  blocksParallel: false,
  requiresUserInput: false,
  autoRetry: true,
  maxRetries: 2,
};

export function normalizeTaskStatus(value: unknown): string | null {
  if (typeof value !== "string" || value.trim() === "") return "todo";
  const status = value.trim().toLowerCase();
  if ((TASK_STATUSES as readonly string[]).includes(status)) return status;
  return TASK_STATUS_ALIASES[status] ?? null;
}

export function normalizeSchedulerStatus(value: unknown): string | null {
  if (typeof value !== "string" || value.trim() === "") return "queued";
  const status = value.trim().toLowerCase();
  if ((SCHEDULER_STATUSES as readonly string[]).includes(status)) return status;
  return SCHEDULER_STATUS_ALIASES[status] ?? null;
}

function normalizeArray(value: unknown): string[] {
  if (Array.isArray(value)) return value.filter((v) => typeof v === "string");
  if (typeof value !== "string" || value.trim() === "") return [];
  try {
    const parsed = JSON.parse(value);
    if (Array.isArray(parsed))
      return parsed.filter((v) => typeof v === "string");
    return [];
  } catch {
    return value
      .split(",")
      .map((v) => v.trim())
      .filter(Boolean);
  }
}

function normalizeBoolean(value: unknown): boolean {
  if (value === true || value === 1) return true;
  if (value === false || value === 0 || value == null) return false;
  if (typeof value === "string") {
    const normalized = value.trim().toLowerCase();
    if (["1", "true", "yes", "y"].includes(normalized)) return true;
    if (["0", "false", "no", "n", ""].includes(normalized)) return false;
  }
  return Boolean(value);
}

function validateChoice(
  field: string,
  value: unknown,
  allowed: readonly string[],
  normalized: Record<string, unknown>,
  errors: string[],
): void {
  if (value === undefined || value === null || value === "") return;
  if (typeof value === "string" && allowed.includes(value)) {
    normalized[field] = value;
    return;
  }
  errors.push(
    `Invalid ${field} "${String(value)}". Must be one of: ${allowed.join(", ")}`,
  );
}

export interface FrontmatterInput {
  risk?: unknown;
  mutationScope?: unknown;
  verification?: unknown;
  planApproval?: unknown;
  taskStatus?: unknown;
  schedulerStatus?: unknown;
  estimatedEffort?: unknown;
  keyFiles?: unknown;
  dependencies?: unknown;
  blocksParallel?: unknown;
  requiresUserInput?: unknown;
  autoRetry?: unknown;
  maxRetries?: unknown;
  [key: string]: unknown;
}

export interface ValidationResult {
  valid: boolean;
  errors: string[];
  normalized: TaskFrontmatter;
}

export function validateTaskFrontmatter(
  frontmatter: FrontmatterInput = {},
): ValidationResult {
  const errors: string[] = [];
  const normalized: Record<string, unknown> = {
    ...DEFAULT_TASK_FRONTMATTER,
    keyFiles: [],
    dependencies: [],
  };

  validateChoice("risk", frontmatter.risk, RISK_LEVELS, normalized, errors);
  validateChoice(
    "mutationScope",
    frontmatter.mutationScope,
    MUTATION_SCOPES,
    normalized,
    errors,
  );
  validateChoice(
    "verification",
    frontmatter.verification,
    VERIFICATION_TYPES,
    normalized,
    errors,
  );
  validateChoice(
    "planApproval",
    frontmatter.planApproval,
    PLAN_APPROVAL_STATES,
    normalized,
    errors,
  );

  if (frontmatter.taskStatus !== undefined) {
    const status = normalizeTaskStatus(frontmatter.taskStatus);
    if (status) {
      normalized.taskStatus = status;
    } else {
      errors.push(
        `Invalid taskStatus "${String(frontmatter.taskStatus)}". Must be one of: ${TASK_STATUSES.join(", ")}`,
      );
    }
  }

  if (frontmatter.schedulerStatus !== undefined) {
    const status = normalizeSchedulerStatus(frontmatter.schedulerStatus);
    if (status) {
      normalized.schedulerStatus = status;
    } else {
      errors.push(
        `Invalid schedulerStatus "${String(frontmatter.schedulerStatus)}". Must be one of: ${SCHEDULER_STATUSES.join(", ")}`,
      );
    }
  }

  if (frontmatter.estimatedEffort !== undefined) {
    const effort = Number(frontmatter.estimatedEffort);
    if (!Number.isNaN(effort) && effort >= 0) {
      normalized.estimatedEffort = effort;
    } else if (frontmatter.estimatedEffort !== null) {
      errors.push(
        `Invalid estimatedEffort "${String(frontmatter.estimatedEffort)}". Must be a non-negative number or null.`,
      );
    }
  }

  if (frontmatter.keyFiles !== undefined) {
    normalized.keyFiles = normalizeArray(frontmatter.keyFiles);
  }
  if (frontmatter.dependencies !== undefined) {
    normalized.dependencies = normalizeArray(frontmatter.dependencies);
  }

  for (const field of ["blocksParallel", "requiresUserInput", "autoRetry"]) {
    if (frontmatter[field] !== undefined) {
      normalized[field] = normalizeBoolean(frontmatter[field]);
    }
  }

  if (frontmatter.maxRetries !== undefined) {
    const retries = Number(frontmatter.maxRetries);
    if (Number.isInteger(retries) && retries >= 0 && retries <= 10) {
      normalized.maxRetries = retries;
    } else {
      errors.push(
        `Invalid maxRetries "${String(frontmatter.maxRetries)}". Must be an integer 0-10.`,
      );
    }
  }

  return {
    valid: errors.length === 0,
    errors,
    normalized: normalized as unknown as TaskFrontmatter,
  };
}

export interface TaskRecord {
  risk?: unknown;
  mutation_scope?: unknown;
  mutationScope?: unknown;
  verification_type?: unknown;
  verificationType?: unknown;
  verification?: unknown;
  plan_approval?: unknown;
  planApproval?: unknown;
  task_status?: unknown;
  taskStatus?: unknown;
  status?: unknown;
  scheduler_status?: unknown;
  schedulerStatus?: unknown;
  estimated_effort?: unknown;
  estimatedEffort?: unknown;
  frontmatter_key_files?: unknown;
  frontmatterKeyFiles?: unknown;
  files?: unknown;
  key_files?: unknown;
  keyFiles?: unknown;
  dependencies?: unknown;
  depends_on?: unknown;
  dependsOn?: unknown;
  depends?: unknown;
  blocks_parallel?: unknown;
  blocksParallel?: unknown;
  requires_user_input?: unknown;
  requiresUserInput?: unknown;
  auto_retry?: unknown;
  autoRetry?: unknown;
  max_retries?: unknown;
  maxRetries?: unknown;
  frontmatter?: TaskFrontmatter;
  [key: string]: unknown;
}

export function taskFrontmatterFromRecord(
  task: TaskRecord = {},
  overrides: Partial<FrontmatterInput> = {},
): ValidationResult {
  const rawFrontmatter: FrontmatterInput = {
    risk: task.risk,
    mutationScope: task.mutation_scope ?? task.mutationScope,
    verification:
      task.verification_type ?? task.verificationType ?? task.verification,
    planApproval: task.plan_approval ?? task.planApproval,
    taskStatus: task.task_status ?? task.taskStatus ?? task.status,
    schedulerStatus: task.scheduler_status ?? task.schedulerStatus,
    estimatedEffort: task.estimated_effort ?? task.estimatedEffort,
    keyFiles:
      task.frontmatter_key_files ??
      task.frontmatterKeyFiles ??
      task.files ??
      task.key_files ??
      task.keyFiles ??
      [],
    dependencies:
      task.dependencies ??
      task.depends_on ??
      task.dependsOn ??
      task.depends ??
      [],
    blocksParallel: task.blocks_parallel ?? task.blocksParallel,
    requiresUserInput: task.requires_user_input ?? task.requiresUserInput,
    autoRetry: task.auto_retry ?? task.autoRetry,
    maxRetries: task.max_retries ?? task.maxRetries,
    ...overrides,
  };

  return validateTaskFrontmatter(rawFrontmatter);
}

export interface BuiltTaskRecord extends TaskRecord {
  frontmatter: TaskFrontmatter;
  frontmatterValid: boolean;
  frontmatterErrors: string[];
}

export function buildTaskRecord(
  task: TaskRecord = {},
  overrides: Partial<FrontmatterInput> = {},
): BuiltTaskRecord {
  const validation = taskFrontmatterFromRecord(task, overrides);
  return {
    ...task,
    frontmatter: validation.normalized,
    frontmatterValid: validation.valid,
    frontmatterErrors: validation.errors,
  };
}

export function withTaskFrontmatter(
  task: TaskRecord = {},
  overrides: Partial<FrontmatterInput> = {},
): BuiltTaskRecord {
  return buildTaskRecord(task, overrides);
}

export interface ParallelCheckResult {
  canParallel: boolean;
  reason?: string;
}

export function canRunInParallel(
  taskA: TaskRecord,
  taskB: TaskRecord,
): ParallelCheckResult {
  if (
    !taskA ||
    !taskB ||
    typeof taskA !== "object" ||
    typeof taskB !== "object"
  ) {
    return { canParallel: false, reason: "Invalid task input" };
  }
  const fmA = taskA.frontmatter ?? buildTaskRecord(taskA).frontmatter;
  const fmB = taskB.frontmatter ?? buildTaskRecord(taskB).frontmatter;

  if (fmA.blocksParallel || fmB.blocksParallel) {
    return {
      canParallel: false,
      reason: "One or both tasks block parallel execution",
    };
  }

  if (fmA.mutationScope === "systemic" || fmB.mutationScope === "systemic") {
    return {
      canParallel: false,
      reason: "One or both tasks have systemic mutation scope",
    };
  }

  const highRisk = ["high", "critical"];
  if (highRisk.includes(fmA.risk) && highRisk.includes(fmB.risk)) {
    return { canParallel: false, reason: "Both tasks are high/critical risk" };
  }

  if (fmA.keyFiles.length > 0 && fmB.keyFiles.length > 0) {
    const filesB = new Set(fmB.keyFiles);
    const overlap = fmA.keyFiles.filter((file) => filesB.has(file));
    if (overlap.length > 0) {
      return {
        canParallel: false,
        reason: `File overlap: ${overlap.join(", ")}`,
      };
    }
  }

  return { canParallel: true };
}

export function canTasksRunInParallel(
  taskA: TaskRecord,
  taskB: TaskRecord,
): ParallelCheckResult {
  return canRunInParallel(taskA, taskB);
}

export function computeTaskPriority(task: TaskRecord): number {
  const fm = task.frontmatter ?? buildTaskRecord(task).frontmatter;
  let score = 50;

  const riskScores: Record<string, number> = {
    none: 0,
    low: 5,
    medium: 15,
    high: 30,
    critical: 50,
  };
  score += riskScores[fm.risk] ?? 0;

  const scopeScores: Record<string, number> = {
    none: 0,
    "docs-only": 2,
    config: 5,
    "test-only": 3,
    isolated: 5,
    bounded: 10,
    "cross-cutting": 25,
    systemic: 40,
  };
  score += scopeScores[fm.mutationScope] ?? 0;

  if (fm.blocksParallel) score += 20;
  if (fm.requiresUserInput) score += 10;
  if (fm.planApproval === "pending") score += 10;

  return Math.min(100, score);
}

export function scoreTaskFrontmatterPriority(task: TaskRecord): number {
  return computeTaskPriority(task);
}
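A minimal usage sketch of the removed frontmatter helpers, with two hypothetical task rows in the snake_case shape the row mapper accepts:

```typescript
import {
  buildTaskRecord,
  canRunInParallel,
  computeTaskPriority,
} from "./task-frontmatter.js";

// Hypothetical rows as they might come out of the tasks table.
const a = buildTaskRecord({
  id: "T1",
  risk: "high",
  mutation_scope: "bounded",
  key_files: '["src/db.ts"]', // JSON string form is parsed by normalizeArray
});
const b = buildTaskRecord({ id: "T2", risk: "low", files: ["src/ui.tsx"], blocks_parallel: 0 });

console.log(a.frontmatterValid, a.frontmatter.keyFiles); // true, ["src/db.ts"]
console.log(canRunInParallel(a, b));   // { canParallel: true } — no overlap, no blockers
console.log(computeTaskPriority(a));   // 50 base + 30 (high risk) + 10 (bounded scope) = 90
```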
@@ -1,69 +0,0 @@
/**
 * Trace event reader for .sf/traces/ directory.
 *
 * Purpose: read typed trace events from JSONL files for gate statistics
 * and performance analysis. Uses a minimal sfRoot implementation (fast path:
 * basePath/.sf) to avoid pulling in the full paths.js dependency chain.
 *
 * Consumer: sf-db.ts gate statistics functions (getGateStats, etc.)
 */
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { join } from "node:path";

function sfRoot(basePath: string): string {
  return join(basePath, ".sf");
}

function tracesDir(basePath: string): string {
  return join(sfRoot(basePath), "traces");
}

export function appendTraceEvent(
  basePath: string,
  traceId: string,
  event: Record<string, unknown>,
): void {
  if (!basePath || !traceId) return;
  // No-op in pi-agent-core — writes are handled by the sf extension.
  void event;
}

export function readTraceEvents(
  basePath: string,
  type: string,
  windowHours = 24,
): Record<string, unknown>[] {
  const dir = tracesDir(basePath);
  if (!existsSync(dir)) return [];
  const cutoff = Date.now() - windowHours * 60 * 60 * 1000;
  const results: Record<string, unknown>[] = [];
  let files: string[];
  try {
    files = readdirSync(dir).filter(
      (f) => f.endsWith(".jsonl") && f !== "latest",
    );
  } catch {
    return [];
  }
  for (const file of files) {
    try {
      const filePath = join(dir, file);
      if (statSync(filePath).mtimeMs < cutoff) continue;
      const lines = readFileSync(filePath, "utf-8").split("\n").filter(Boolean);
      for (const line of lines) {
        try {
          const ev = JSON.parse(line) as Record<string, unknown>;
          if (!type || ev["type"] === type) {
            const tsMs = ev["ts"] ? new Date(ev["ts"] as string).getTime() : 0;
            if (!ev["ts"] || tsMs >= cutoff) results.push(ev);
          }
        } catch {
          /* skip malformed lines */
        }
      }
    } catch {
      /* skip unreadable files */
    }
  }
  return results;
}
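A minimal sketch of reading gate statistics through the removed trace reader. The module path and the "gate_verdict" event type are assumptions for illustration; only the function signature comes from the removed file.

```typescript
import { readTraceEvents } from "./trace-reader.js"; // assumed module name for the removed file

// Events recorded over the last 24 hours under <repo>/.sf/traces/*.jsonl.
const events = readTraceEvents(process.cwd(), "gate_verdict", 24);
const flagged = events.filter((e) => e["verdict"] === "flag").length;
console.log(`${flagged} flagged gate verdicts out of ${events.length} events in the last 24h`);
```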
@@ -1,94 +0,0 @@
/**
 * SF Workflow Logger — minimal adapter for pi-agent-core.
 *
 * Purpose: provide logWarning/logError without pulling in the full
 * workflow-logger dependency chain (file-lock, notification-store, paths).
 * The sf extension's own workflow-logger handles the full audit/notification
 * pipeline; this module is used only by sf-db.ts in pi-agent-core context.
 *
 * Consumer: sf-db.ts in pi-agent-core for operational warnings and errors.
 */

export interface LogEntry {
  ts: string;
  severity: "warn" | "error";
  component: string;
  message: string;
  context?: Record<string, string>;
}

const MAX_BUFFER = 100;
let _buffer: LogEntry[] = [];
let _stderrEnabled = true;

export function setStderrLoggingEnabled(enabled: boolean): boolean {
  const previous = _stderrEnabled;
  _stderrEnabled = enabled;
  return previous;
}

export function logWarning(
  component: string,
  message: string,
  context?: Record<string, string>,
): void {
  _push("warn", component, message, context);
}

export function logError(
  component: string,
  message: string,
  context?: Record<string, string>,
): void {
  _push("error", component, message, context);
}

export function drainLogs(): LogEntry[] {
  const entries = _buffer;
  _buffer = [];
  return entries;
}

export function peekLogs(): LogEntry[] {
  return _buffer;
}

export function hasErrors(): boolean {
  return _buffer.some((e) => e.severity === "error");
}

export function hasWarnings(): boolean {
  return _buffer.some((e) => e.severity === "warn");
}

export function hasAnyIssues(): boolean {
  return _buffer.length > 0;
}

export function _resetLogs(): void {
  _buffer = [];
}

function _push(
  severity: "warn" | "error",
  component: string,
  message: string,
  context?: Record<string, string>,
): void {
  const entry: LogEntry = {
    ts: new Date().toISOString(),
    severity,
    component,
    message,
    ...(context ? { context } : {}),
  };
  const prefix = severity === "error" ? "ERROR" : "WARN";
  const ctxStr = context ? ` ${JSON.stringify(context)}` : "";
  if (_stderrEnabled) {
    process.stderr.write(`[sf:${component}] ${prefix}: ${message}${ctxStr}\n`);
  }
  _buffer.push(entry);
  if (_buffer.length > MAX_BUFFER) {
    _buffer.shift();
  }
}
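A minimal sketch of the removed logger's buffer-and-drain contract: entries go to stderr immediately and accumulate in a bounded in-memory buffer until the caller drains them.

```typescript
import { logWarning, logError, drainLogs, hasErrors } from "./workflow-logger.js";

logWarning("sf-db", "backup is older than 24h", { file: "sf.db" });
logError("sf-db", "failed to open metrics.db");

if (hasErrors()) {
  // Flush the in-memory ring buffer (capped at 100 entries) into the caller's report.
  const entries = drainLogs();
  console.log(JSON.stringify(entries, null, 2));
}
```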
@@ -1 +0,0 @@
export * from "./db/index.js";
@@ -1,32 +0,0 @@
{
  "compilerOptions": {
    "target": "ES2024",
    "module": "Node16",
    "lib": ["ES2024"],
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "incremental": true,
    "forceConsistentCasingInFileNames": true,
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true,
    "inlineSources": true,
    "inlineSourceMap": false,
    "moduleResolution": "Node16",
    "resolveJsonModule": true,
    "allowImportingTsExtensions": false,
    "useDefineForClassFields": false,
    "types": ["node"],
    "outDir": "./dist",
    "rootDir": "./src"
  },
  "include": ["src/**/*.ts"],
  "exclude": [
    "node_modules",
    "dist",
    "**/*.d.ts",
    "src/**/*.d.ts",
    "src/**/*.test.ts"
  ]
}
sf-worker/go.mod (new file, 49 lines)
@@ -0,0 +1,49 @@
module github.com/singularity-ng/singularity-forge/sf-worker

go 1.26.2

require (
	github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
	github.com/charmbracelet/bubbletea v1.3.4 // indirect
	github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
	github.com/charmbracelet/keygen v0.5.3 // indirect
	github.com/charmbracelet/lipgloss v1.1.0 // indirect
	github.com/charmbracelet/log v1.0.0 // indirect
	github.com/charmbracelet/ssh v0.0.0-20250128164007-98fd5ae11894 // indirect
	github.com/charmbracelet/wish v1.4.7 // indirect
	github.com/charmbracelet/x/ansi v0.8.0 // indirect
	github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
	github.com/charmbracelet/x/conpty v0.1.0 // indirect
	github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 // indirect
	github.com/charmbracelet/x/input v0.3.4 // indirect
	github.com/charmbracelet/x/term v0.2.1 // indirect
	github.com/charmbracelet/x/termios v0.1.0 // indirect
	github.com/charmbracelet/x/windows v0.2.0 // indirect
	github.com/creack/pty v1.1.24 // indirect
	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
	github.com/go-logfmt/logfmt v0.6.1 // indirect
	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/mattn/go-localereader v0.0.1 // indirect
	github.com/mattn/go-runewidth v0.0.16 // indirect
	github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
	github.com/muesli/cancelreader v0.2.2 // indirect
	github.com/muesli/termenv v0.16.0 // indirect
	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
	github.com/prometheus/client_golang v1.23.2 // indirect
	github.com/prometheus/client_model v0.6.2 // indirect
	github.com/prometheus/common v0.66.1 // indirect
	github.com/prometheus/procfs v0.16.1 // indirect
	github.com/rivo/uniseg v0.4.7 // indirect
	github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
	go.yaml.in/yaml/v2 v2.4.2 // indirect
	golang.org/x/crypto v0.51.0 // indirect
	golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect
	golang.org/x/sync v0.20.0 // indirect
	golang.org/x/sys v0.44.0 // indirect
	golang.org/x/text v0.37.0 // indirect
	google.golang.org/protobuf v1.36.8 // indirect
)
sf-worker/go.sum (new file, 90 lines)
@@ -0,0 +1,90 @@
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/bubbletea v1.3.4 h1:kCg7B+jSCFPLYRA52SDZjr51kG/fMUEoPoZrkaDHyoI=
github.com/charmbracelet/bubbletea v1.3.4/go.mod h1:dtcUCyCGEX3g9tosuYiut3MXgY/Jsv9nKVdibKKRRXo=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/keygen v0.5.3 h1:2MSDC62OUbDy6VmjIE2jM24LuXUvKywLCmaJDmr/Z/4=
github.com/charmbracelet/keygen v0.5.3/go.mod h1:TcpNoMAO5GSmhx3SgcEMqCrtn8BahKhB8AlwnLjRUpk=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/log v1.0.0 h1:HVVVMmfOorfj3BA9i8X8UL69Hoz9lI0PYwXfJvOdRc4=
github.com/charmbracelet/log v1.0.0/go.mod h1:uYgY3SmLpwJWxmlrPwXvzVYujxis1vAKRV/0VQB7yWA=
github.com/charmbracelet/ssh v0.0.0-20250128164007-98fd5ae11894 h1:Ffon9TbltLGBsT6XE//YvNuu4OAaThXioqalhH11xEw=
github.com/charmbracelet/ssh v0.0.0-20250128164007-98fd5ae11894/go.mod h1:hg+I6gvlMl16nS9ZzQNgBIrrCasGwEw0QiLsDcP01Ko=
github.com/charmbracelet/wish v1.4.7 h1:O+jdLac3s6GaqkOHHSwezejNK04vl6VjO1A+hl8J8Yc=
github.com/charmbracelet/wish v1.4.7/go.mod h1:OBZ8vC62JC5cvbxJLh+bIWtG7Ctmct+ewziuUWK+G14=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/conpty v0.1.0 h1:4zc8KaIcbiL4mghEON8D72agYtSeIgq8FSThSPQIb+U=
github.com/charmbracelet/x/conpty v0.1.0/go.mod h1:rMFsDJoDwVmiYM10aD4bH2XiRgwI7NYJtQgl5yskjEQ=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 h1:JSt3B+U9iqk37QUU2Rvb6DSBYRLtWqFqfxf8l5hOZUA=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86/go.mod h1:2P0UgXMEa6TsToMSuFqKFQR+fZTO9CNGUNokkPatT/0=
github.com/charmbracelet/x/input v0.3.4 h1:Mujmnv/4DaitU0p+kIsrlfZl/UlmeLKw1wAP3e1fMN0=
github.com/charmbracelet/x/input v0.3.4/go.mod h1:JI8RcvdZWQIhn09VzeK3hdp4lTz7+yhiEdpEQtZN+2c=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.0 h1:y4rjAHeFksBAfGbkRDmVinMg7x7DELIGAFbdNvxg97k=
github.com/charmbracelet/x/termios v0.1.0/go.mod h1:H/EVv/KRnrYjz+fCYa9bsKdqF3S8ouDK0AZEbG7r+/U=
github.com/charmbracelet/x/windows v0.2.0 h1:ilXA1GJjTNkgOm94CLPeSz7rar54jtFatdmoiONPuEw=
github.com/charmbracelet/x/windows v0.2.0/go.mod h1:ZibNFR49ZFqCXgP76sYanisxRyC+EYrBE7TTknD8s1s=
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/go-logfmt/logfmt v0.6.1 h1:4hvbpePJKnIzH1B+8OR/JPbTx37NktoI9LE2QZBBkvE=
github.com/go-logfmt/logfmt v0.6.1/go.mod h1:EV2pOAQoZaT1ZXZbqDl5hrymndi4SY9ED9/z6CO0XAk=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
golang.org/x/crypto v0.51.0 h1:IBPXwPfKxY7cWQZ38ZCIRPI50YLeevDLlLnyC5wRGTI=
golang.org/x/crypto v0.51.0/go.mod h1:8AdwkbraGNABw2kOX6YFPs3WM22XqI4EXEd8g+x7Oc8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.44.0 h1:ildZl3J4uzeKP07r2F++Op7E9B29JRUy+a27EibtBTQ=
golang.org/x/sys v0.44.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.37.0 h1:Cqjiwd9eSg8e0QAkyCaQTNHFIIzWtidPahFWR83rTrc=
golang.org/x/text v0.37.0/go.mod h1:a5sjxXGs9hsn/AJVwuElvCAo9v8QYLzvavO5z2PiM38=
google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
100
sf-worker/main.go
Normal file
100
sf-worker/main.go
Normal file
|
|
@ -0,0 +1,100 @@
// sf-worker — SSH PTY daemon that executes sf headless runs on behalf of a remote orchestrator.
//
// Purpose: allow the SF orchestrator to dispatch autonomous unit attempts to remote hosts
// (GPU boxes, Windows machines, parallel workers) over SSH without requiring a full SF
// installation on the controlling machine.
//
// Usage:
//
//	sf-worker [flags]
//	  --addr              SSH listen address (default ":2222")
//	  --metrics-addr      Prometheus /metrics address (default ":9100")
//	  --host-key          Path to SSH host key (default "~/.sf/worker_host_key")
//	  --authorized-keys   Path to authorized_keys file (default "~/.sf/worker_authorized_keys")
//	  --sf-bin            Path to sf binary (default: resolved from $PATH)
//	  --max-sessions      Maximum concurrent sessions (default 16)
package main

import (
    "context"
    "flag"
    "fmt"
    "net/http"
    "os"
    "os/signal"
    "path/filepath"
    "syscall"
    "time"

    "github.com/charmbracelet/log"
    "github.com/prometheus/client_golang/prometheus/promhttp"
)

func defaultPath(rel string) string {
    home, err := os.UserHomeDir()
    if err != nil {
        return rel
    }
    return filepath.Join(home, ".sf", rel)
}

func main() {
    addr := flag.String("addr", ":2222", "SSH listen address")
    metricsAddr := flag.String("metrics-addr", ":9100", "Prometheus metrics address")
    hostKeyPath := flag.String("host-key", defaultPath("worker_host_key"), "SSH host key path (RSA/Ed25519 PEM)")
    authorizedKeysPath := flag.String("authorized-keys", defaultPath("worker_authorized_keys"), "authorized_keys path")
    sfBin := flag.String("sf-bin", "", "Path to sf binary (default: resolved from $PATH)")
    maxSessions := flag.Int("max-sessions", 16, "Maximum concurrent sessions")
    flag.Parse()

    logger := log.NewWithOptions(os.Stderr, log.Options{
        ReportTimestamp: true,
        TimeFormat:      time.RFC3339,
        Level:           log.InfoLevel,
    })

    srv, err := NewServer(ServerConfig{
        Addr:               *addr,
        HostKeyPath:        *hostKeyPath,
        AuthorizedKeysPath: *authorizedKeysPath,
        SFBin:              *sfBin,
        MaxSessions:        *maxSessions,
        Logger:             logger,
    })
    if err != nil {
        logger.Error("failed to create server", "err", err)
        os.Exit(1)
    }

    // Prometheus metrics endpoint.
    metricsMux := http.NewServeMux()
    metricsMux.Handle("/metrics", promhttp.Handler())
    metricsMux.HandleFunc("/health", func(w http.ResponseWriter, _ *http.Request) {
        fmt.Fprintln(w, "ok")
    })
    metricsServer := &http.Server{
        Addr:    *metricsAddr,
        Handler: metricsMux,
    }
    go func() {
        logger.Info("metrics server listening", "addr", *metricsAddr)
        if err := metricsServer.ListenAndServe(); err != nil && err != http.ErrServerClosed {
            logger.Error("metrics server error", "err", err)
        }
    }()

    ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
    defer cancel()

    logger.Info("sf-worker starting", "addr", *addr)
    if err := srv.ListenAndServe(ctx); err != nil {
        logger.Error("server error", "err", err)
        os.Exit(1)
    }

    shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer shutdownCancel()
    _ = metricsServer.Shutdown(shutdownCtx)

    logger.Info("sf-worker stopped")
}
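For context, a minimal TypeScript sketch of how the orchestrator side might dispatch a headless run to this daemon over SSH; the host name, key path, and the --unit flag are illustrative assumptions, not part of this commit.

// dispatch-to-worker.ts: illustrative only. Assumes the system `ssh` client is on PATH
// and that a hypothetical orchestrator key is authorized on the worker.
import { spawn } from "node:child_process";

function dispatchHeadlessRun(host: string, unitId: string): Promise<number> {
  return new Promise((resolve, reject) => {
    const child = spawn(
      "ssh",
      [
        "-p", "2222",                                      // sf-worker's default --addr port
        "-i", `${process.env.HOME}/.sf/orchestrator_key`,  // hypothetical key path
        "-tt",                                             // force a PTY; the worker runs sf in a PTY
        host,
        `sf headless --unit ${unitId}`,                    // "headless" is on the worker allowlist; --unit is illustrative
      ],
      { stdio: "inherit" },
    );
    child.on("error", reject);
    child.on("exit", (code) => resolve(code ?? 1));
  });
}

// Hypothetical usage:
// dispatchHeadlessRun("gpu-box-01", "unit-42").then((code) => process.exit(code));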
215  sf-worker/server.go  Normal file
@@ -0,0 +1,215 @@
// server.go — SSH server setup and connection acceptance.
//
// Purpose: accept SSH connections from the SF orchestrator, enforce key-based auth,
// and hand each session off to runSession for PTY execution.
package main

import (
    "bufio"
    "context"
    "fmt"
    "net"
    "os"
    "os/exec"
    "strings"
    "sync"
    "sync/atomic"
    "time"

    "github.com/charmbracelet/log"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "golang.org/x/crypto/ssh"
)

var (
    metricActiveSessions = promauto.NewGauge(prometheus.GaugeOpts{
        Name: "sfworker_active_sessions",
        Help: "Currently active SSH sessions.",
    })
    metricSessionsTotal = promauto.NewCounterVec(prometheus.CounterOpts{
        Name: "sfworker_sessions_total",
        Help: "Total SSH sessions by outcome.",
    }, []string{"outcome"})
    metricSessionDuration = promauto.NewHistogram(prometheus.HistogramOpts{
        Name:    "sfworker_session_duration_seconds",
        Help:    "Session duration in seconds.",
        Buckets: prometheus.ExponentialBuckets(1, 2, 12),
    })
)

// ServerConfig holds the sf-worker SSH server configuration.
type ServerConfig struct {
    Addr               string
    HostKeyPath        string
    AuthorizedKeysPath string
    // SFBin is the path to the sf binary. If empty, resolved from $PATH.
    SFBin       string
    MaxSessions int
    Logger      *log.Logger
}

// Server is the sf-worker SSH server.
type Server struct {
    cfg        ServerConfig
    sshConfig  *ssh.ServerConfig
    sfBin      string
    activeSess atomic.Int32
    logger     *log.Logger
}

// NewServer constructs and configures the SSH server.
func NewServer(cfg ServerConfig) (*Server, error) {
    if cfg.MaxSessions <= 0 {
        cfg.MaxSessions = 16
    }

    sfBin := cfg.SFBin
    if sfBin == "" {
        var err error
        sfBin, err = exec.LookPath("sf")
        if err != nil {
            return nil, fmt.Errorf("sf binary not found in PATH (set --sf-bin): %w", err)
        }
    }

    hostKey, err := loadOrGenerateHostKey(cfg.HostKeyPath)
    if err != nil {
        return nil, fmt.Errorf("host key: %w", err)
    }

    authorizedKeys, err := loadAuthorizedKeys(cfg.AuthorizedKeysPath)
    if err != nil {
        return nil, fmt.Errorf("authorized keys: %w", err)
    }

    sshConfig := &ssh.ServerConfig{
        PublicKeyCallback: func(conn ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
            fp := ssh.FingerprintSHA256(key)
            if _, ok := authorizedKeys[fp]; ok {
                return &ssh.Permissions{Extensions: map[string]string{"fp": fp}}, nil
            }
            return nil, fmt.Errorf("key not authorized: %s", fp)
        },
    }
    sshConfig.AddHostKey(hostKey)

    return &Server{
        cfg:       cfg,
        sshConfig: sshConfig,
        sfBin:     sfBin,
        logger:    cfg.Logger,
    }, nil
}

// ListenAndServe starts accepting SSH connections until ctx is cancelled.
func (s *Server) ListenAndServe(ctx context.Context) error {
    ln, err := net.Listen("tcp", s.cfg.Addr)
    if err != nil {
        return fmt.Errorf("listen %s: %w", s.cfg.Addr, err)
    }
    s.logger.Info("SSH server listening", "addr", s.cfg.Addr, "sf-bin", s.sfBin)

    var wg sync.WaitGroup
    go func() {
        <-ctx.Done()
        _ = ln.Close()
    }()

    for {
        conn, err := ln.Accept()
        if err != nil {
            if ctx.Err() != nil {
                break
            }
            s.logger.Warn("accept error", "err", err)
            continue
        }

        if int(s.activeSess.Load()) >= s.cfg.MaxSessions {
            s.logger.Warn("max sessions reached, rejecting connection", "remote", conn.RemoteAddr())
            _ = conn.Close()
            continue
        }

        wg.Add(1)
        go func(c net.Conn) {
            defer wg.Done()
            s.handleConn(ctx, c)
        }(conn)
    }

    wg.Wait()
    return nil
}

func (s *Server) handleConn(ctx context.Context, conn net.Conn) {
    defer conn.Close()
    start := time.Now()
    remote := conn.RemoteAddr().String()

    sshConn, chans, reqs, err := ssh.NewServerConn(conn, s.sshConfig)
    if err != nil {
        s.logger.Warn("SSH handshake failed", "remote", remote, "err", err)
        metricSessionsTotal.WithLabelValues("auth_failed").Inc()
        return
    }
    defer sshConn.Close()

    fp := sshConn.Permissions.Extensions["fp"]
    s.logger.Info("new connection", "remote", remote, "fp", fp)
    s.activeSess.Add(1)
    metricActiveSessions.Inc()
    defer func() {
        s.activeSess.Add(-1)
        metricActiveSessions.Dec()
        metricSessionDuration.Observe(time.Since(start).Seconds())
    }()

    // Discard global requests.
    go ssh.DiscardRequests(reqs)

    for newChan := range chans {
        if newChan.ChannelType() != "session" {
            _ = newChan.Reject(ssh.UnknownChannelType, "only session channels accepted")
            continue
        }
        ch, requests, err := newChan.Accept()
        if err != nil {
            s.logger.Warn("channel accept error", "err", err)
            metricSessionsTotal.WithLabelValues("error").Inc()
            return
        }

        outcome := s.runSession(ctx, ch, requests, remote, fp)
        metricSessionsTotal.WithLabelValues(outcome).Inc()
        s.logger.Info("session ended", "remote", remote, "outcome", outcome, "duration", time.Since(start).Round(time.Millisecond))
    }
}

// loadAuthorizedKeys parses an OpenSSH authorized_keys file into a fingerprint→key map.
func loadAuthorizedKeys(path string) (map[string]ssh.PublicKey, error) {
    f, err := os.Open(path)
    if os.IsNotExist(err) {
        return map[string]ssh.PublicKey{}, nil // empty = no keys authorized (warn at connection time)
    }
    if err != nil {
        return nil, err
    }
    defer f.Close()

    keys := map[string]ssh.PublicKey{}
    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        line := strings.TrimSpace(scanner.Text())
        if line == "" || strings.HasPrefix(line, "#") {
            continue
        }
        pub, _, _, _, err := ssh.ParseAuthorizedKey([]byte(line))
        if err != nil {
            continue // skip malformed lines
        }
        keys[ssh.FingerprintSHA256(pub)] = pub
    }
    return keys, scanner.Err()
}
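As a rough companion to loadAuthorizedKeys and the PublicKeyCallback above, the sketch below shows, in TypeScript, how one authorized_keys line maps to the SHA256 fingerprint the worker uses as its lookup key; the example entry is hypothetical.

// fingerprint.ts: sketch of the SHA256 fingerprint used as the authorized-key lookup key.
import { createHash } from "node:crypto";

// An authorized_keys line is "<type> <base64-key-blob> [comment]"; the fingerprint is
// "SHA256:" plus the unpadded base64 of sha256 over the decoded key blob, which is what
// Go's ssh.FingerprintSHA256 returns on the worker side.
function fingerprintSHA256(authorizedKeysLine: string): string {
  const blobB64 = authorizedKeysLine.trim().split(/\s+/)[1] ?? "";
  const digest = createHash("sha256").update(Buffer.from(blobB64, "base64")).digest("base64");
  return "SHA256:" + digest.replace(/=+$/, "");
}

// Hypothetical entry:
// fingerprintSHA256("ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI... orchestrator@sf");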
199  sf-worker/session.go  Normal file
@@ -0,0 +1,199 @@
// session.go — per-SSH-session PTY execution of sf headless.
//
// Purpose: spawn `sf headless <args>` in a real PTY for each authorized SSH session,
// wire the PTY I/O to the SSH channel, handle window-resize requests, and clean up
// on disconnect.
package main

import (
    "context"
    "fmt"
    "io"
    "os"
    "os/exec"
    "strings"
    "sync"

    "github.com/creack/pty"
    "golang.org/x/crypto/ssh"
)

// allowedSubcommands are the sf subcommands a worker session may execute.
// Restricting to headless prevents the orchestrator key from being used for arbitrary execution.
var allowedSubcommands = map[string]bool{
    "headless":  true,
    "version":   true,
    "--version": true,
}

// runSession handles a single SSH session channel: waits for an exec request,
// validates the command, spawns it in a PTY, and streams I/O until completion.
// Returns an outcome label for metrics: "ok", "rejected", "error".
func (s *Server) runSession(ctx context.Context, ch ssh.Channel, requests <-chan *ssh.Request, remote, fp string) string {
    defer ch.Close()

    type execReq struct {
        cmd    string
        ptyW   uint32
        ptyH   uint32
        hasPTY bool
    }

    var pending execReq

    // Collect session requests until we get "exec" or the channel closes.
    for req := range requests {
        switch req.Type {
        case "pty-req":
            // https://datatracker.ietf.org/doc/html/rfc4254#section-6.2
            if len(req.Payload) < 4 {
                _ = req.Reply(false, nil)
                continue
            }
            // term string length prefix
            termLen := int(req.Payload[0])<<24 | int(req.Payload[1])<<16 | int(req.Payload[2])<<8 | int(req.Payload[3])
            offset := 4 + termLen
            if len(req.Payload) < offset+8 {
                _ = req.Reply(false, nil)
                continue
            }
            pending.ptyW = uint32(req.Payload[offset])<<24 | uint32(req.Payload[offset+1])<<16 | uint32(req.Payload[offset+2])<<8 | uint32(req.Payload[offset+3])
            pending.ptyH = uint32(req.Payload[offset+4])<<24 | uint32(req.Payload[offset+5])<<16 | uint32(req.Payload[offset+6])<<8 | uint32(req.Payload[offset+7])
            pending.hasPTY = true
            _ = req.Reply(true, nil)

        case "window-change":
            // Handled after PTY is started (ignore if no PTY yet).
            _ = req.Reply(false, nil)

        case "exec":
            if len(req.Payload) < 4 {
                _ = req.Reply(false, nil)
                return "rejected"
            }
            cmdLen := int(req.Payload[0])<<24 | int(req.Payload[1])<<16 | int(req.Payload[2])<<8 | int(req.Payload[3])
            if len(req.Payload) < 4+cmdLen {
                _ = req.Reply(false, nil)
                return "rejected"
            }
            pending.cmd = string(req.Payload[4 : 4+cmdLen])
            _ = req.Reply(true, nil)

            outcome := s.execCommand(ctx, ch, requests, pending.cmd, pending.ptyW, pending.ptyH, remote, fp)
            return outcome

        case "shell":
            // No interactive shell — reject.
            _ = req.Reply(false, nil)
            sendExitStatus(ch, 1)
            return "rejected"

        default:
            if req.WantReply {
                _ = req.Reply(false, nil)
            }
        }
    }
    return "ok"
}

// execCommand validates and executes the requested command in a PTY.
func (s *Server) execCommand(ctx context.Context, ch ssh.Channel, requests <-chan *ssh.Request, cmdStr string, ptyW, ptyH uint32, remote, fp string) string {
    args := strings.Fields(cmdStr)
    if len(args) == 0 {
        sendExitStatus(ch, 1)
        return "rejected"
    }

    // Validate: first arg must be "sf" (or the sf-bin basename), second must be an allowed subcommand.
    sfBase := s.sfBin
    for i := len(sfBase) - 1; i >= 0; i-- {
        if sfBase[i] == '/' || sfBase[i] == '\\' {
            sfBase = sfBase[i+1:]
            break
        }
    }

    start := 0
    if args[0] == sfBase || args[0] == "sf" {
        start = 1
    }
    if start >= len(args) || !allowedSubcommands[args[start]] {
        s.logger.Warn("rejected command", "remote", remote, "cmd", cmdStr)
        fmt.Fprintf(ch, "sf-worker: command not allowed: %q\r\n", cmdStr)
        sendExitStatus(ch, 1)
        return "rejected"
    }

    // Build the actual command: replace the leading "sf" with the real binary path.
    execArgs := append([]string{s.sfBin}, args[start:]...)
    cmd := exec.CommandContext(ctx, execArgs[0], execArgs[1:]...)
    cmd.Env = append(os.Environ(), "SF_WORKER=1", fmt.Sprintf("SF_WORKER_CLIENT_FP=%s", fp))

    s.logger.Info("exec", "remote", remote, "cmd", execArgs)

    // Start with PTY.
    ptmx, err := pty.StartWithSize(cmd, &pty.Winsize{
        Cols: uint16(ptyW),
        Rows: uint16(ptyH),
    })
    if err != nil {
        s.logger.Error("pty start failed", "err", err)
        fmt.Fprintf(ch, "sf-worker: failed to start: %v\r\n", err)
        sendExitStatus(ch, 1)
        return "error"
    }
    defer func() {
        _ = ptmx.Close()
    }()

    // Handle subsequent window-change requests in background.
    go func() {
        for req := range requests {
            if req.Type == "window-change" && len(req.Payload) >= 8 {
                w := uint32(req.Payload[0])<<24 | uint32(req.Payload[1])<<16 | uint32(req.Payload[2])<<8 | uint32(req.Payload[3])
                h := uint32(req.Payload[4])<<24 | uint32(req.Payload[5])<<16 | uint32(req.Payload[6])<<8 | uint32(req.Payload[7])
                _ = pty.Setsize(ptmx, &pty.Winsize{Cols: uint16(w), Rows: uint16(h)})
            }
            if req.WantReply {
                _ = req.Reply(false, nil)
            }
        }
    }()

    // Bidirectional copy: PTY ↔ SSH channel.
    var wg sync.WaitGroup
    wg.Add(2)
    go func() {
        defer wg.Done()
        _, _ = io.Copy(ptmx, ch)
    }()
    go func() {
        defer wg.Done()
        _, _ = io.Copy(ch, ptmx)
    }()

    err = cmd.Wait()
    wg.Wait()

    exitCode := 0
    if err != nil {
        if exitErr, ok := err.(*exec.ExitError); ok {
            exitCode = exitErr.ExitCode()
        } else {
            exitCode = 1
        }
    }
    sendExitStatus(ch, uint32(exitCode))

    if exitCode != 0 {
        return "error"
    }
    return "ok"
}

// sendExitStatus sends an SSH exit-status request to the channel.
func sendExitStatus(ch ssh.Channel, code uint32) {
    payload := []byte{byte(code >> 24), byte(code >> 16), byte(code >> 8), byte(code)}
    _, _ = ch.SendRequest("exit-status", false, payload)
}
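To make the allowlist behaviour concrete, here is a small TypeScript mirror of the validation execCommand applies to the SSH exec string; it is a sketch for illustration, not code from this commit, and it omits the --sf-bin basename case.

// allowlist.ts: sketch of the exec-string validation performed by execCommand.
const allowedSubcommands = new Set(["headless", "version", "--version"]);

function isAllowed(execString: string): boolean {
  const args = execString.trim().split(/\s+/).filter(Boolean);
  if (args.length === 0) return false;
  const start = args[0] === "sf" ? 1 : 0; // a leading "sf" is skipped before the check
  return start < args.length && allowedSubcommands.has(args[start]);
}

// isAllowed("sf headless --unit 42") -> true   (--unit is an illustrative flag)
// isAllowed("sf version")            -> true
// isAllowed("bash -c 'id'")          -> false  (the worker replies with exit status 1)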
@@ -308,6 +308,198 @@ export const BUNDLED_COST_TABLE = [
    outputPer1k: 0.00028,
    updatedAt: "2025-03-15",
  },
+  // Gemini preview / future models
+  {
+    id: "gemini-3.1-pro-preview",
+    inputPer1k: 0.00125,
+    outputPer1k: 0.005,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "gemini-3.1-flash-lite-preview",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0004,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "gemini-3-pro-preview",
+    inputPer1k: 0.00125,
+    outputPer1k: 0.005,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "gemini-3-flash-preview",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0004,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "gemini-2.5-flash",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0004,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "gemini-2.5-flash-lite",
+    inputPer1k: 0.00005,
+    outputPer1k: 0.0002,
+    updatedAt: "2026-05-01",
+  },
+  // GLM (ZhipuAI)
+  {
+    id: "glm-4.7",
+    inputPer1k: 0.0006,
+    outputPer1k: 0.0024,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-4.7-flash",
+    inputPer1k: 0,
+    outputPer1k: 0,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-4.7-flashx",
+    inputPer1k: 0.00007,
+    outputPer1k: 0.00028,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-5",
+    inputPer1k: 0.001,
+    outputPer1k: 0.004,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-5-turbo",
+    inputPer1k: 0.0012,
+    outputPer1k: 0.0048,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-5.1",
+    inputPer1k: 0.0014,
+    outputPer1k: 0.0056,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "glm-5v-turbo",
+    inputPer1k: 0.0012,
+    outputPer1k: 0.0048,
+    updatedAt: "2026-05-01",
+  },
+  // Qwen (Alibaba)
+  {
+    id: "qwen3-coder:480b",
+    inputPer1k: 0.0004,
+    outputPer1k: 0.0016,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "qwen3-coder-next",
+    inputPer1k: 0.0004,
+    outputPer1k: 0.0016,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "qwen3-next:80b",
+    inputPer1k: 0.0002,
+    outputPer1k: 0.0008,
+    updatedAt: "2026-05-01",
+  },
+  // Kimi (Moonshot)
+  {
+    id: "kimi-k2.6",
+    inputPer1k: 0.0006,
+    outputPer1k: 0.0024,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "kimi-for-coding",
+    inputPer1k: 0.0006,
+    outputPer1k: 0.0024,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "kimi-k2-thinking",
+    inputPer1k: 0.001,
+    outputPer1k: 0.004,
+    updatedAt: "2026-05-01",
+  },
+  // MiniMax
+  {
+    id: "MiniMax-M2.7",
+    inputPer1k: 0.0006,
+    outputPer1k: 0.0024,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "MiniMax-M2.7-highspeed",
+    inputPer1k: 0.0006,
+    outputPer1k: 0.0024,
+    updatedAt: "2026-05-01",
+  },
+  // Mistral versioned variants
+  {
+    id: "devstral-medium-2507",
+    inputPer1k: 0.0004,
+    outputPer1k: 0.002,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "devstral-small-2505",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0003,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "labs-devstral-small-2512",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0003,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "mistral-large-2411",
+    inputPer1k: 0.002,
+    outputPer1k: 0.006,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "mistral-medium-2505",
+    inputPer1k: 0.0004,
+    outputPer1k: 0.002,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "mistral-medium-2508",
+    inputPer1k: 0.0004,
+    outputPer1k: 0.002,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "mistral-small-2506",
+    inputPer1k: 0.0001,
+    outputPer1k: 0.0006,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "open-mistral-7b",
+    inputPer1k: 0.00025,
+    outputPer1k: 0.00025,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "open-mixtral-8x22b",
+    inputPer1k: 0.002,
+    outputPer1k: 0.006,
+    updatedAt: "2026-05-01",
+  },
+  {
+    id: "open-mixtral-8x7b",
+    inputPer1k: 0.0007,
+    outputPer1k: 0.0007,
+    updatedAt: "2026-05-01",
+  },
];

/**
 * Lookup cost for a model ID. Returns undefined if not found.
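A quick usage sketch of the consolidated table: the helper below estimates a request's dollar cost from lookupModelCost; the token counts and the estimateCostUSD name are illustrative, only lookupModelCost and the per-1K fields come from model-cost-table.js.

// cost-estimate.ts: illustrative helper over the canonical cost table.
import { lookupModelCost } from "./model-cost-table.js";

function estimateCostUSD(modelId: string, inputTokens: number, outputTokens: number): number | undefined {
  const entry = lookupModelCost(modelId);
  if (!entry) return undefined; // unknown model; the caller decides how to treat it
  return (inputTokens / 1000) * entry.inputPer1k + (outputTokens / 1000) * entry.outputPer1k;
}

// With the values added above: estimateCostUSD("glm-5", 12_000, 3_000)
//   = 12 * 0.001 + 3 * 0.004 = 0.024 USD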
@@ -4,6 +4,7 @@
import { getProviderCapabilities } from "@singularity-forge/ai";
import { getToolCompatibility } from "@singularity-forge/coding-agent";
import { tierOrdinal } from "./complexity-classifier.js";
+import { lookupModelCost } from "./model-cost-table.js";
// ─── Known Model Tiers ───────────────────────────────────────────────────────
// Maps known model IDs to their capability tier. Used when tier_models is not
// explicitly configured to pick the best available model for each tier.
@@ -93,87 +94,6 @@ export const MODEL_CAPABILITY_TIER = {
  "open-mixtral-8x22b": "heavy",
  "pixtral-large-latest": "heavy",
};
-// ─── Cost Table (per 1K input tokens, approximate USD) ───────────────────────
-// Used for cross-provider cost comparison when multiple providers offer
-// the same capability tier.
-const MODEL_COST_PER_1K_INPUT = {
-  "claude-haiku-4-5": 0.0008,
-  "claude-3-5-haiku-latest": 0.0008,
-  "claude-sonnet-4-6": 0.003,
-  "claude-sonnet-4-5-20250514": 0.003,
-  "claude-opus-4-6": 0.015,
-  "gpt-4o-mini": 0.00015,
-  "gpt-4o": 0.0025,
-  "gpt-4.1": 0.002,
-  "gpt-4.1-mini": 0.0004,
-  "gpt-4.1-nano": 0.0001,
-  "gpt-5": 0.01,
-  "gpt-5-mini": 0.0003,
-  "gpt-5-nano": 0.0001,
-  "gpt-5-pro": 0.015,
-  "gpt-5.1": 0.005,
-  "gpt-5.1-codex-max": 0.003,
-  "gpt-5.1-codex-mini": 0.0003,
-  "gpt-5.2": 0.005,
-  "gpt-5.2-codex": 0.005,
-  "gpt-5.3-codex": 0.005,
-  "gpt-5.3-codex-spark": 0.0003,
-  "gpt-5.4": 0.005,
-  "gpt-5.4-mini": 0.00075,
-  "gpt-5.5": 0.005,
-  "o4-mini": 0.005,
-  "o4-mini-deep-research": 0.005,
-  "gemini-2.0-flash": 0.0001,
-  "gemini-2.5-pro": 0.00125,
-  "gemini-3.1-pro-preview": 0.00125,
-  "gemini-3.1-flash-lite-preview": 0.0001,
-  "gemini-3-pro-preview": 0.00125,
-  "gemini-3-flash-preview": 0.0001,
-  "gemini-2.5-flash": 0.0001,
-  "gemini-2.5-flash-lite": 0.00005,
-  "deepseek-chat": 0.00014,
-  "glm-4.7": 0.0006,
-  "glm-4.7-flash": 0,
-  "glm-4.7-flashx": 0.00007,
-  "glm-5": 0.001,
-  "glm-5-turbo": 0.0012,
-  "glm-5.1": 0.0014,
-  "glm-5v-turbo": 0.0012,
-  "qwen3-coder:480b": 0.0004,
-  "qwen3-coder-next": 0.0004,
-  "qwen3-next:80b": 0.0002,
-  "kimi-k2.6": 0.0006,
-  "kimi-for-coding": 0.0006,
-  "kimi-k2-thinking": 0.001,
-  "MiniMax-M2.7": 0.0006,
-  "MiniMax-M2.7-highspeed": 0.0006,
-  "codestral-latest": 0.0003,
-  "devstral-2512": 0.0004,
-  "devstral-medium-2507": 0.0004,
-  "devstral-medium-latest": 0.0004,
-  "devstral-small-2505": 0.0001,
-  "devstral-small-2507": 0.0001,
-  "labs-devstral-small-2512": 0.0001,
-  "magistral-medium-latest": 0.002,
-  "magistral-small": 0.0005,
-  "ministral-3b-latest": 0.00004,
-  "ministral-8b-latest": 0.0001,
-  "mistral-large-2411": 0.002,
-  "mistral-large-2512": 0.0005,
-  "mistral-large-latest": 0.0005,
-  "mistral-medium-2505": 0.0004,
-  "mistral-medium-2508": 0.0004,
-  "mistral-medium-latest": 0.0004,
-  "mistral-nemo": 0.00015,
-  "mistral-small-2506": 0.0001,
-  "mistral-small-2603": 0.00015,
-  "mistral-small-latest": 0.00015,
-  "open-mistral-7b": 0.00025,
-  "open-mixtral-8x22b": 0.002,
-  "open-mixtral-8x7b": 0.0007,
-  "pixtral-12b": 0.00015,
-  "pixtral-large-latest": 0.002,
-};
// ─── Capability Profiles Data Table ──────────────────────────────────────────
// Per-model capability profiles (0–100 scale). Used for capability-aware
// model selection within an eligible tier set.
@@ -1067,8 +987,8 @@ export function scoreEligibleModels(
  scored.sort((a, b) => {
    const scoreDiff = b.score - a.score;
    if (Math.abs(scoreDiff) > 2) return scoreDiff;
-    const costA = MODEL_COST_PER_1K_INPUT[a.modelId] ?? Infinity;
-    const costB = MODEL_COST_PER_1K_INPUT[b.modelId] ?? Infinity;
+    const costA = lookupModelCost(a.modelId)?.inputPer1k ?? Infinity;
+    const costB = lookupModelCost(b.modelId)?.inputPer1k ?? Infinity;
    if (costA !== costB) return costA - costB;
    return a.modelId.localeCompare(b.modelId);
  });
@@ -1121,8 +1041,10 @@ export function getEligibleModels(tier, availableModelIds, routingConfig) {
  return availableModelIds
    .filter((id) => getModelTier(id) === tier)
    .sort((a, b) => {
-      const costA = getModelCost(a);
-      const costB = getModelCost(b);
+      const costA =
+        lookupModelCost(canonicalCapabilityModelId(a))?.inputPer1k ?? 999;
+      const costB =
+        lookupModelCost(canonicalCapabilityModelId(b))?.inputPer1k ?? 999;
      return costA - costB;
    });
}
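As a worked example of the comparator above (simplified: the canonicalCapabilityModelId step is skipped here), cheaper table entries sort first and unknown IDs fall back to 999 and sort last.

// tie-break.ts: sketch of the cost ordering used by getEligibleModels.
import { lookupModelCost } from "./model-cost-table.js";

const cheapFirst = ["glm-4.7", "glm-4.7-flashx", "some-unknown-model"].sort(
  (a, b) => (lookupModelCost(a)?.inputPer1k ?? 999) - (lookupModelCost(b)?.inputPer1k ?? 999),
);
// -> ["glm-4.7-flashx", "glm-4.7", "some-unknown-model"]
//    (0.00007 < 0.0006; the unknown ID defaults to 999)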
@@ -1341,20 +1263,6 @@ function isKnownModel(modelId) {
  }
  return false;
}
-function getModelCost(modelId) {
-  const canonicalId = canonicalCapabilityModelId(modelId);
-  if (MODEL_COST_PER_1K_INPUT[canonicalId] !== undefined) {
-    return MODEL_COST_PER_1K_INPUT[canonicalId];
-  }
-  // Check partial matches
-  for (const [knownId, cost] of Object.entries(MODEL_COST_PER_1K_INPUT)) {
-    if (canonicalId.includes(knownId) || knownId.includes(canonicalId)) {
-      return cost;
-    }
-  }
-  // Unknown cost — assume expensive to avoid routing to unknown cheap models
-  return 999;
-}
// ─── Tool Compatibility Filter (ADR-005 Phase 3) ───────────────────────────
/**
 * Check if a tool is compatible with a provider's capabilities.
@@ -10,6 +10,7 @@ import { homedir } from "node:os";
import { join } from "node:path";
import { getModels, getProviders } from "@singularity-forge/ai";
import { selectByBenchmarks } from "./benchmark-selector.js";
+import { classifyError } from "./error-classifier.js";
import { defaultRoutingConfig, MODEL_CAPABILITY_TIER } from "./model-router.js";
import {
  DEFAULT_RUNAWAY_CHANGED_FILES_WARNING,
@@ -582,16 +583,14 @@ export function getNextFallbackModel(currentModelId, modelConfig) {
 * Detect whether an error message indicates a transient network error
 * (worth retrying the same model) vs a permanent provider error
 * (auth failure, quota exceeded, etc. -- should fall back immediately).
+ *
+ * Delegates to error-classifier for consistent classification across the
+ * extension. error-classifier is the single source of truth for error triage.
 */
export function isTransientNetworkError(errorMsg) {
  if (!errorMsg) return false;
-  const hasNetworkSignal =
-    /network|ECONNRESET|ETIMEDOUT|ECONNREFUSED|socket hang up|fetch failed|connection.*reset|dns/i.test(
-      errorMsg,
-    );
-  const hasPermanentSignal =
-    /auth|unauthorized|forbidden|invalid.*key|quota|billing/i.test(errorMsg);
-  return hasNetworkSignal && !hasPermanentSignal;
+  const { kind } = classifyError(errorMsg);
+  return kind === "network" || kind === "connection" || kind === "stream";
}
/**
 * Validate a model ID string.
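A hedged usage sketch of the delegated classification: the retry helper, the module path, and the sample error strings below are illustrative; only isTransientNetworkError and the three kinds named in the diff are taken from the change.

// retry-or-fallback.ts: illustrative policy built on the refactored helper.
import { isTransientNetworkError } from "./preferences-models.js"; // assumed module path

async function runWithRetry<T>(attempt: () => Promise<T>, maxRetries = 2): Promise<T> {
  for (let i = 0; ; i++) {
    try {
      return await attempt();
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      // network / connection / stream kinds retry the same model;
      // anything else (auth, quota, ...) should fall back immediately.
      if (i < maxRetries && isTransientNetworkError(msg)) continue;
      throw err;
    }
  }
}

// Expected classification (depends on classifyError):
// isTransientNetworkError("fetch failed: ECONNRESET") -> likely true
// isTransientNetworkError("401 Unauthorized")         -> false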
@@ -24,7 +24,6 @@
  },
  "include": ["src/resources/extensions", "src/types.d.ts"],
  "exclude": [
-    "src/resources/extensions/vectordrive/tests/**/*.ts",
    "src/resources/extensions/**/tests/**/*.ts",
    "src/resources/extensions/**/tests/**/*.mjs",
    "src/tests/**/*.ts",
@@ -171,7 +171,6 @@ export default defineConfig({
      "src/resources/extensions/github-sync/tests/**/*.test.ts",
      "src/resources/extensions/universal-config/tests/**/*.test.ts",
      "src/resources/extensions/voice/tests/**/*.test.ts",
-      "src/resources/extensions/vectordrive/tests/**/*.test.ts",
      "src/resources/extensions/mcp-client/tests/**/*.test.ts",
      "src/resources/extensions/async-jobs/*.test.ts",
      "src/resources/extensions/browser-tools/tests/*.test.mjs",