refactor(retire): drop JSONL fallback from judgment-log + delete one-shot migration scripts

- judgment-log.js: DB is always available; strip appendFileSync/readFileSync
  JSONL fallback paths and resolveJudgmentLogPath export. Non-fatal on DB
  failure is preserved — agent loop must never be disrupted.
- Delete scripts/migrate-to-vitest{,-all}.mjs and fix-vitest-api.mjs —
  one-shot migration tools that have already run; no longer needed.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
Mikael Hugo 2026-05-09 15:55:10 +02:00
parent a70004cf2a
commit bd0c612993
4 changed files with 10 additions and 465 deletions

View file

@ -1,94 +0,0 @@
#!/usr/bin/env node
/**
 * Fix remaining node:test API calls that the initial migration missed.
 *
 * 1. t.after(() => ...) → afterEach(() => ...) (add afterEach to imports if needed)
 * 2. t.before(() => ...) → beforeEach(() => ...) (add beforeEach to imports if needed)
 * 3. await t.test("name", fn) → test("name", fn) (flatten subtests)
 * 4. t.skip("msg") → return ctx.skip("msg") only in describe blocks; most are top-level test() calls
 *    where t.skip() should become a return or conditional.
 */
import { execSync } from "node:child_process";
import { readFileSync, writeFileSync } from "node:fs";

// Find candidate files via grep. grep exits with a non-zero status when no
// file matches; previously that crashed the script with an unhandled execSync
// error, so treat "no matches" as an empty file list instead.
let grepOutput = "";
try {
  grepOutput = execSync(
    'grep -rl "t\\.after\\b\\|t\\.before\\b\\|t\\.test(\\|t\\.skip(" src/tests/ src/resources/extensions/ --include="*.test.ts" --include="*.test.mjs"',
    { encoding: "utf-8", maxBuffer: 50 * 1024 * 1024 },
  );
} catch {
  // No matches (or the target directories are absent) — nothing to fix.
}
const files = grepOutput.trim().split("\n").filter(Boolean);
console.log(`Fixing ${files.length} files`);
let updated = 0;
for (const file of files) {
  let content = readFileSync(file, "utf-8");
  let changed = false;
  // 1. t.after(() => ...) → afterEach(() => ...)
  if (content.includes("t.after(")) {
    content = content.replace(/\bt\.after\(/g, "afterEach(");
    changed = true;
  }
  // 2. t.before(() => ...) → beforeEach(() => ...)
  if (content.includes("t.before(")) {
    content = content.replace(/\bt\.before\(/g, "beforeEach(");
    changed = true;
  }
  // 3. await t.test("name", fn) → test("name", fn)
  //    and t.test("name", fn) without await
  if (content.includes("t.test(")) {
    content = content.replace(/await\s+t\.test\(/g, "test(");
    content = content.replace(/\bt\.test\(/g, "test(");
    changed = true;
  }
  // 4. t.skip("msg") — Vitest skips at declaration time (test.skip), not at
  //    runtime, so the best effort is an early return plus a marker comment.
  if (content.includes("t.skip(")) {
    content = content.replace(/\bt\.skip\(([^)]*)\)/g, "return; // skip: $1");
    changed = true;
  }
  // 5. Ensure afterEach/beforeEach are imported from vitest if used.
  if (changed) {
    const needsAfterEach = content.includes("afterEach(");
    const needsBeforeEach = content.includes("beforeEach(");
    if (needsAfterEach || needsBeforeEach) {
      // Find the existing vitest import line and extend it in place.
      const vitestImportMatch = content.match(
        /import\s+\{([^}]+)\}\s+from\s+['"]vitest['"];?/,
      );
      if (vitestImportMatch) {
        const existing = vitestImportMatch[1].split(",").map((s) => s.trim());
        const additions = [];
        if (needsAfterEach && !existing.includes("afterEach"))
          additions.push("afterEach");
        if (needsBeforeEach && !existing.includes("beforeEach"))
          additions.push("beforeEach");
        if (additions.length > 0) {
          const all = [...existing, ...additions];
          const newImport = `import { ${all.join(", ")} } from 'vitest';`;
          content = content.replace(vitestImportMatch[0], newImport);
        }
      }
    }
  }
  if (changed) {
    writeFileSync(file, content, "utf-8");
    updated++;
  }
}
console.log(`Updated ${updated} files`);

View file

@ -1,138 +0,0 @@
#!/usr/bin/env node
/**
* Migrate ALL test files from node:test to vitest.
*
* Scans src/, packages/, web/, and scripts/.
* Changes:
* 1. Replace `from "node:test"` → `from 'vitest'` in all imports
* 2. Files using mock.fn(): replace `mock.fn` → `vi.fn` and add `vi` to imports
* 3. Files using mock.timers: migrate to vi fake timers API
*/
import { readdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
// Directory roots scanned for *.test.ts / *.test.mjs files.
const ROOTS = ["src", "packages", "web", "scripts"].map((dir) =>
  join(process.cwd(), dir),
);
/**
 * Recursively collect *.test.ts / *.test.mjs files under the given
 * directories. *.d.ts files are excluded; directories that do not exist
 * are silently skipped.
 */
function collectTestFiles(dirs) {
  const found = [];
  for (const root of dirs) {
    let entries;
    try {
      entries = readdirSync(root, { withFileTypes: true });
    } catch {
      // Directory may not exist
      continue;
    }
    for (const entry of entries) {
      const fullPath = join(root, entry.name);
      if (entry.isDirectory()) {
        found.push(...collectTestFiles([fullPath]));
        continue;
      }
      const isTestFile =
        entry.name.endsWith(".test.ts") || entry.name.endsWith(".test.mjs");
      if (isTestFile && !entry.name.endsWith(".d.ts")) {
        found.push(fullPath);
      }
    }
  }
  return found;
}
/**
 * Rewrite node:test import statements to their vitest equivalents.
 *
 * Handles:
 *   - `import test from "node:test";`              → `import { test } from 'vitest';`
 *   - `import [test, ]{ a, b } from "node:test";`  → `import { test, vi?, a, b } from 'vitest';`
 *
 * The `mock` named import is dropped (vitest exposes `vi` instead), and `vi`
 * is added when the file uses mock.fn() or mock.timers.
 *
 * @param {string} content - Full source text of a test file.
 * @param {{hasMockFn: boolean, hasMockTimers: boolean}} opts
 * @returns {string} content with imports rewritten.
 */
function migrateImport(content, { hasMockFn, hasMockTimers }) {
  // Case 1: import test from "node:test"; (or single quotes, optional semicolon)
  content = content.replace(
    /^import test from ["']node:test["'];?$/gm,
    "import { test } from 'vitest';",
  );
  // Case 2: import { ... } from "node:test" (and variants with default import)
  content = content.replace(
    /import\s+(test,)?\s*\{\s*([^}]+)\}\s+from\s+["']node:test["'];?/g,
    (_match, hasDefault, named) => {
      const namedList = named
        .split(",")
        .map((s) => s.trim())
        .filter((s) => s !== "mock" && s !== "");
      // Bug fix: a *named* `test` import used to be filtered out and never
      // re-added, producing `import {  } from 'vitest';` and losing the
      // import. Keep `test` first whenever it was imported either as the
      // default or by name.
      const wantsTest = Boolean(hasDefault) || namedList.includes("test");
      const extras = hasMockFn || hasMockTimers ? ["vi"] : [];
      const rest = namedList.filter((s) => s !== "test");
      const allNamed = [...(wantsTest ? ["test"] : []), ...extras, ...rest];
      return `import { ${allNamed.join(", ")} } from 'vitest';`;
    },
  );
  return content;
}
// --- Driver: scan every root, migrate matching files, report progress ------
const files = collectTestFiles(ROOTS);
console.log(`Found ${files.length} test files`);
let updated = 0;
let errors = 0;
for (const file of files) {
  try {
    const original = readFileSync(file, "utf-8");
    const referencesNodeTest =
      original.includes('from "node:test"') ||
      original.includes("from 'node:test'");
    if (!referencesNodeTest) continue;
    const hasMockFn = original.includes("mock.fn");
    const hasMockTimers = original.includes("mock.timers");
    let migrated = migrateImport(original, { hasMockFn, hasMockTimers });
    // mock.fn → vi.fn (including ReturnType<> references in TS files)
    if (hasMockFn) {
      migrated = migrated
        .replace(/\bmock\.fn\b/g, "vi.fn")
        .replace(/ReturnType<typeof mock\.fn>/g, "ReturnType<typeof vi.fn>");
    }
    // node:test mock.timers → vitest fake-timer API
    if (hasMockTimers) {
      migrated = migrated
        .replace(/\bmock\.timers\.enable\(\)/g, "vi.useFakeTimers()")
        .replace(
          /\bmock\.timers\.tick\(([^)]+)\)/g,
          "vi.advanceTimersByTime($1)",
        )
        .replace(/\bmock\.timers\.reset\(\)/g, "vi.useRealTimers()");
    }
    if (migrated !== original) {
      writeFileSync(file, migrated, "utf-8");
      updated++;
      const rel = file.replace(process.cwd() + "/", "");
      const tags = [];
      if (hasMockFn) tags.push("mock");
      if (hasMockTimers) tags.push("timers");
      const tagStr = tags.length ? ` (${tags.join(", ")})` : "";
      console.log(` Migrated${tagStr}: ${rel}`);
    }
  } catch (err) {
    console.error(`Error: ${file}: ${err.message}`);
    errors++;
  }
}
console.log(`Updated ${updated} files, ${errors} errors`);

View file

@ -1,167 +0,0 @@
#!/usr/bin/env node
/**
* Migrate test files from node:test to vitest.
*
* Changes:
* 1. Replace `from "node:test"` → `from 'vitest'` in all imports
* 2. Files using mock.fn(): replace `mock.fn` → `vi.fn` and add `vi` to imports
* 3. auto-loop.test.ts: replace mock.timers with vi fake timers API
*/
import { readdirSync, readFileSync, writeFileSync } from "node:fs";
import { join } from "node:path";
// Root directory scanned for test files.
const ROOT = join(process.cwd(), "src");
// Repo-relative test files known to use node:test's mock.fn(); these get `vi`
// added to their vitest import and mock.fn rewritten to vi.fn.
const FILES_WITH_MOCK_FN = new Set([
"src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts",
"src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts",
"src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts",
"src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts",
"src/resources/extensions/sf/tests/claude-import-tui.test.ts",
]);
// auto-loop.test.ts additionally uses mock.timers and gets the fake-timer
// migration. Resolved against the current working directory — the previous
// hard-coded /home/mhugo/... absolute path only worked on one contributor's
// machine and would never match files collected under process.cwd().
const AUTO_LOOP_FILE = join(
  process.cwd(),
  "src/resources/extensions/sf/tests/auto-loop.test.ts",
);
/**
 * Depth-first collection of *.test.ts / *.test.mjs files under `dir`,
 * excluding *.d.ts declarations. Throws if `dir` cannot be read.
 */
function collectTestFiles(dir) {
  const matches = [];
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    const fullPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      matches.push(...collectTestFiles(fullPath));
    } else if (
      (entry.name.endsWith(".test.ts") || entry.name.endsWith(".test.mjs")) &&
      !entry.name.endsWith(".d.ts")
    ) {
      matches.push(fullPath);
    }
  }
  return matches;
}
/**
 * Rewrite the first node:test import statement to its vitest equivalent.
 *
 * The `mock` named import is dropped (vitest exposes `vi` instead); `vi` is
 * added for files flagged as using mock.fn()/mock.timers.
 *
 * Fixes over the previous version:
 *  - removed a dead, empty `if (isAutoLoop || isMockFn)` block;
 *  - merged the byte-identical isAutoLoop / isMockFn branches;
 *  - a *named* `test` import used to be filtered out without being re-added
 *    (losing the import entirely); it is now kept whenever it was imported.
 *
 * @param {string} content - Full source text of a test file.
 * @param {{isAutoLoop: boolean, isMockFn: boolean}} opts
 * @returns {string} content with the import rewritten.
 */
function migrateImport(content, { isAutoLoop, isMockFn }) {
  // Bare default import: `import test from "node:test";` — only attempted
  // when the file does not use single-quoted node:test imports (preserves
  // the original script's guard).
  if (!content.includes("from 'node:test'")) {
    content = content.replace(
      /^import test from "node:test";$/gm,
      "import { test } from 'vitest';",
    );
  }
  const needsVi = isAutoLoop || isMockFn;
  // Named-import form, with or without a default import (first match only).
  return content.replace(
    /import\s+(test,)?\s*\{\s*([^}]+)\}\s+from\s+"node:test";?/,
    (_match, hasDefault, named) => {
      const namedList = named
        .split(",")
        .map((s) => s.trim())
        .filter((s) => s !== "mock" && s !== "");
      const wantsTest = Boolean(hasDefault) || namedList.includes("test");
      const rest = namedList.filter((s) => s !== "test");
      const allNamed = [
        ...(wantsTest ? ["test"] : []),
        ...(needsVi ? ["vi"] : []),
        ...rest,
      ];
      return `import { ${allNamed.join(", ")} } from 'vitest';`;
    },
  );
}
// --- Driver: scan src/, migrate each node:test file, report progress -------
const files = collectTestFiles(ROOT);
console.log(`Found ${files.length} test files`);
let updated = 0;
let errors = 0;
for (const file of files) {
  try {
    const original = readFileSync(file, "utf-8");
    if (!original.includes('from "node:test"')) continue;
    const isAutoLoop = file === AUTO_LOOP_FILE;
    const relPath = file.replace(process.cwd() + "/", "");
    const isMockFn = FILES_WITH_MOCK_FN.has(relPath);
    // Step 1: rewrite the node:test import to vitest.
    let migrated = migrateImport(original, { isAutoLoop, isMockFn });
    // Step 2: mock.fn → vi.fn (for mock-fn files and auto-loop).
    if (isAutoLoop || isMockFn) {
      migrated = migrated
        .replace(/\bmock\.fn\b/g, "vi.fn")
        .replace(/ReturnType<typeof mock\.fn>/g, "ReturnType<typeof vi.fn>");
    }
    // Step 3: mock.timers → vitest fake timers (auto-loop only).
    if (isAutoLoop) {
      migrated = migrated
        .replace(/\bmock\.timers\.enable\(\)/g, "vi.useFakeTimers()")
        .replace(
          /\bmock\.timers\.tick\(([^)]+)\)/g,
          "vi.advanceTimersByTime($1)",
        )
        .replace(/\bmock\.timers\.reset\(\)/g, "vi.useRealTimers()");
    }
    if (migrated !== original) {
      writeFileSync(file, migrated, "utf-8");
      updated++;
      const rel = file.replace(process.cwd() + "/", "");
      if (isAutoLoop || isMockFn) {
        console.log(` Migrated (mock): ${rel}`);
      }
    }
  } catch (err) {
    console.error(`Error: ${file}: ${err.message}`);
    errors++;
  }
}
console.log(`Updated ${updated} files, ${errors} errors`);

View file

@ -1,26 +1,22 @@
/**
* Judgment log records agent decision-making during autonomous mode.
*
* When the agent makes a non-trivial call between alternatives, it logs a
* JudgmentEntry. These accumulate in the SQLite judgments table (schema v40+)
* with fallback to .sf/runtime/judgment-log.jsonl for legacy environments.
*
* Storage: sf.db judgments table (preferred) or sfRuntimeRoot/judgment-log.jsonl (fallback).
* JudgmentEntries accumulate in the SQLite judgments table (schema v40+).
* Storage: sf.db judgments table — DB-first, no file fallback.
*
* The tool `sf_log_judgment` (registered in dynamic-tools.ts or equivalent)
* calls appendJudgment(). readJudgmentLog() is used by the compounding step.
*/
import { appendFileSync, existsSync, mkdirSync, readFileSync } from "node:fs";
import { mkdirSync } from "node:fs";
import { join } from "node:path";
import { sfRoot, sfRuntimeRoot } from "./paths.js";
import { sfRoot } from "./paths.js";
import { getJudgmentsForUnit, insertJudgment, openDatabase } from "./sf-db.js";
const JUDGMENT_LOG_SCHEMA_VERSION = 1;
/**
* Append a single judgment entry to the judgment log.
* Prefers SQLite; falls back to JSONL when DB is unavailable.
* Failure is non-fatal — silently swallowed so the agent loop is not disrupted.
* Writes to the SF SQLite DB. Non-fatal on DB failure — the agent loop must not be disrupted.
*/
export function appendJudgment(basePath, entry) {
const full = {
@ -32,16 +28,6 @@ export function appendJudgment(basePath, entry) {
try {
ensureJudgmentDb(basePath);
insertJudgment(full);
return;
} catch {
// Fall through to JSONL backup
}
// Fallback: JSONL file
try {
const logPath = resolveJudgmentLogPath(basePath);
mkdirSync(join(logPath, ".."), { recursive: true });
appendFileSync(logPath, JSON.stringify(full) + "\n", "utf-8");
} catch {
// Non-fatal — judgment logging must never break the agent loop
}
@ -58,58 +44,16 @@ export function readJudgmentLog(basePath, unitId) {
try {
ensureJudgmentDb(basePath);
const rows = getJudgmentsForUnit(unitId ?? "", 1000);
if (rows.length > 0) {
return rows.map((r) => ({
schemaVersion: JUDGMENT_LOG_SCHEMA_VERSION,
...r,
alternatives: r.alternatives,
}));
}
} catch {
// Fall through to JSONL backup
}
// Fallback: JSONL file
const logPath = resolveJudgmentLogPath(basePath);
if (!existsSync(logPath)) return [];
try {
const raw = readFileSync(logPath, "utf-8");
const entries = [];
for (const line of raw.split("\n")) {
const trimmed = line.trim();
if (!trimmed) continue;
try {
const parsed = normalizeJudgmentEntry(JSON.parse(trimmed));
if (!parsed) continue;
if (unitId && !parsed.unitId.startsWith(unitId)) continue;
entries.push(parsed);
} catch {
// Skip malformed lines
}
}
return entries;
return rows.map((r) => ({
schemaVersion: JUDGMENT_LOG_SCHEMA_VERSION,
...r,
alternatives: r.alternatives,
}));
} catch {
return [];
}
}
/**
 * Validate and normalize a raw judgment entry parsed from the JSONL log.
 * Returns null for non-object values (including arrays) and for entries
 * whose schemaVersion does not match JUDGMENT_LOG_SCHEMA_VERSION; a missing
 * schemaVersion defaults to the current version.
 */
function normalizeJudgmentEntry(entry) {
  const isPlainObject =
    entry !== null &&
    entry !== undefined &&
    typeof entry === "object" &&
    !Array.isArray(entry);
  if (!isPlainObject) return null;
  const schemaVersion = entry.schemaVersion ?? JUDGMENT_LOG_SCHEMA_VERSION;
  return schemaVersion === JUDGMENT_LOG_SCHEMA_VERSION
    ? { ...entry, schemaVersion }
    : null;
}
/**
 * Resolve the absolute path to the judgment log file.
 *
 * @param {string} basePath - Base path forwarded to sfRuntimeRoot.
 * @returns {string} Path to judgment-log.jsonl under the SF runtime root
 *   (per the module header, .sf/runtime/judgment-log.jsonl).
 */
export function resolveJudgmentLogPath(basePath) {
return join(sfRuntimeRoot(basePath), "judgment-log.jsonl");
}
function ensureJudgmentDb(basePath) {
const dir = sfRoot(basePath);
mkdirSync(dir, { recursive: true });