chore: delete superseded esbuild test-compile scripts

compile-tests.mjs and dist-test-resolve.mjs were for an older esbuild+node
--test approach. The project now uses Vitest end-to-end. Dead code.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
Mikael Hugo 2026-05-09 16:04:41 +02:00
parent 9df46d2d88
commit 830a259630
3 changed files with 48 additions and 342 deletions

View file

@@ -1,243 +0,0 @@
#!/usr/bin/env node
/**
* Compile all TypeScript source + test files to dist-test/ using esbuild.
* Run compiled JS directly with node --test (no per-file TS overhead).
*
* Usage: node scripts/compile-tests.mjs
*/
import { existsSync, symlinkSync } from "node:fs";
import { cp, mkdir, readdir, readFile, rm, writeFile } from "node:fs/promises";
import { createRequire } from "node:module";
import { join } from "node:path";
import { fileURLToPath } from "node:url";
// Directory containing this script, derived from the module URL.
const __dirname = fileURLToPath(new URL(".", import.meta.url));
// Project root: one level above scripts/ (see the Usage note above).
const ROOT = join(__dirname, "..");
// Load esbuild through a CJS require() by absolute path into the project's
// own node_modules, rather than resolving the bare "esbuild" specifier.
const require = createRequire(import.meta.url);
const esbuild = require(join(ROOT, "node_modules/esbuild"));
// ── Source collection ────────────────────────────────────────────────
// Directory (and file) names never descended into while scanning.
const SKIP_DIRS = new Set([
  "node_modules",
  "templates",
  "__tests__",
  "integration",
]);

/**
 * Recursively gather absolute paths of files under `dir` whose names end
 * with one of `exts` (default: .ts and .mjs). TypeScript declaration
 * files (.d.ts) are excluded, and any entry named in SKIP_DIRS is
 * ignored entirely. A missing or unreadable directory yields [].
 *
 * @param {string} dir - directory to walk
 * @param {string[]} [exts] - accepted filename suffixes
 * @returns {Promise<string[]>} absolute file paths, in traversal order
 */
async function collectFiles(dir, exts = [".ts", ".mjs"]) {
  let children;
  try {
    children = await readdir(dir, { withFileTypes: true });
  } catch {
    // Directory doesn't exist (or can't be read) — nothing to collect.
    return [];
  }
  const collected = [];
  for (const child of children) {
    if (SKIP_DIRS.has(child.name)) continue;
    const path = join(dir, child.name);
    if (child.isDirectory()) {
      const nested = await collectFiles(path, exts);
      collected.push(...nested);
      continue;
    }
    const wanted =
      exts.some((ext) => child.name.endsWith(ext)) &&
      !child.name.endsWith(".d.ts");
    if (wanted) collected.push(path);
  }
  return collected;
}
// Directories never mirrored into dist-test when overlaying assets.
const ASSET_SKIP_DIRS = new Set(["node_modules", "__tests__", "integration"]);

/**
 * Mirror every file under srcDir into destDir, creating destination
 * directories lazily (only when a file is actually copied into them).
 * Entries named in ASSET_SKIP_DIRS (node_modules, __tests__, integration)
 * are skipped. Everything else is copied verbatim: .ts/.tsx originals
 * (for jiti), .mjs helpers, .md/.yaml/.json assets, etc. esbuild's
 * compiled .js output already lands in dist-test, so this just overlays
 * the asset files on top. A missing srcDir is a silent no-op.
 *
 * @param {string} srcDir - source directory to mirror
 * @param {string} destDir - destination directory
 */
async function copyAssets(srcDir, destDir) {
  let children;
  try {
    children = await readdir(srcDir, { withFileTypes: true });
  } catch {
    // Source directory absent — nothing to overlay.
    return;
  }
  for (const child of children) {
    if (ASSET_SKIP_DIRS.has(child.name)) continue;
    const from = join(srcDir, child.name);
    const to = join(destDir, child.name);
    if (child.isDirectory()) {
      await copyAssets(from, to);
      continue;
    }
    await mkdir(destDir, { recursive: true });
    await cp(from, to, { force: true });
  }
}
/**
 * Orchestrate the full test-compile pipeline:
 *   1. collect .ts/.mjs entry points from src/, packages/<pkg>/src/, web/lib/
 *   2. transpile them to dist-test/ with esbuild (bundle:false)
 *   3. overlay non-compiled assets and original .ts sources
 *   4. rewrite relative .ts import specifiers in compiled output to .js
 *   5. remove stale compiled test files whose source no longer exists
 *   6. ensure dist-test/node_modules resolves to the real node_modules
 *
 * Fixes vs. previous revision: `rm`/`existsSync` are now statically
 * imported at the top of the file (they were awkward mid-function dynamic
 * imports of already-imported modules), and the inner cleanup loop no
 * longer shadows the outer `srcDir` variable.
 */
async function main() {
  const start = Date.now();

  // ── 1. Collect entry points from src/ and packages/*/src/ ──
  const srcFiles = await collectFiles(join(ROOT, "src"));
  const packagesDir = join(ROOT, "packages");
  const pkgEntries = await readdir(packagesDir, { withFileTypes: true });
  const packageFiles = [];
  for (const entry of pkgEntries) {
    if (!entry.isDirectory()) continue;
    const pkgSrc = join(packagesDir, entry.name, "src");
    packageFiles.push(...(await collectFiles(pkgSrc)));
  }
  // Also compile web/lib/ — some tests import from ../../web/lib/
  const webLibFiles = await collectFiles(join(ROOT, "web", "lib"));
  const entryPoints = [...srcFiles, ...packageFiles, ...webLibFiles];
  console.log(`Compiling ${entryPoints.length} files to dist-test/...`);

  // ── 2. Transpile ──
  // bundle:false transforms TypeScript but keeps import specifiers verbatim.
  // We post-process the output to rewrite .ts → .js in import strings.
  await esbuild.build({
    entryPoints,
    outdir: join(ROOT, "dist-test"),
    outbase: ROOT,
    bundle: false,
    format: "esm",
    platform: "node",
    target: "node24",
    sourcemap: "inline",
    packages: "external",
    logLevel: "warning",
  });

  // ── 3. Overlay assets ──
  // Copy non-compiled assets from src/ to dist-test/src/ maintaining structure.
  // Tests use import.meta.url to resolve sibling .md, .yaml, .json, .ts etc.
  // Also copy original .ts files — jiti-based imports load .ts source directly.
  const srcDir = join(ROOT, "src");
  const distSrcDir = join(ROOT, "dist-test", "src");
  await copyAssets(srcDir, distSrcDir);
  console.log("Copied non-TS assets and .ts source files to dist-test/src/");
  // Copy packages/*/src/ assets as well
  for (const entry of pkgEntries) {
    if (!entry.isDirectory()) continue;
    const pkgSrc = join(packagesDir, entry.name, "src");
    const pkgDistSrc = join(ROOT, "dist-test", "packages", entry.name, "src");
    await copyAssets(pkgSrc, pkgDistSrc);
  }
  // Copy web/lib/ assets (tests import from ../../web/lib/ relative to dist-test/src/tests/)
  await copyAssets(
    join(ROOT, "web", "lib"),
    join(ROOT, "dist-test", "web", "lib"),
  );
  // Copy web/components/ assets (xterm-theme test reads shell-terminal.tsx via import.meta.dirname)
  await copyAssets(
    join(ROOT, "web", "components"),
    join(ROOT, "dist-test", "web", "components"),
  );
  // Copy scripts/ non-TS files (.cjs etc) — some tests require() scripts directly
  await copyAssets(join(ROOT, "scripts"), join(ROOT, "dist-test", "scripts"));
  // Copy root package.json — some tests read it to check version/engines fields
  await cp(
    join(ROOT, "package.json"),
    join(ROOT, "dist-test", "package.json"),
    { force: true },
  );
  // Copy root dist/ into dist-test/dist/ — some tests compute projectRoot as
  // 3 levels up from dist-test/src/tests/ which lands at dist-test/, then
  // import from dist package entrypoints etc.
  const rootDistDir = join(ROOT, "dist");
  const distTestDistDir = join(ROOT, "dist-test", "dist");
  await copyAssets(rootDistDir, distTestDistDir);

  // ── 4. Rewrite .ts import specifiers to .js in compiled JS ──
  // esbuild with bundle:false preserves original specifiers; Node can't load .ts.
  const compiledJsFiles = await collectFiles(join(ROOT, "dist-test"), [".js"]);
  // Regexes match only relative (./ or ../) specifiers in static `from "…"`
  // and dynamic `import("…")` forms, so sourceMappingURL comments and bare
  // package specifiers are never touched.
  const tsImportRe = /(from\s+["'])(\.\.?\/[^"']*?)\.ts(["'])/g;
  const tsDynImportRe = /(import\(["'])(\.\.?\/[^"']*?)\.ts(["'])\)/g;
  let rewritten = 0;
  await Promise.all(
    compiledJsFiles.map(async (file) => {
      const src = await readFile(file, "utf-8");
      const out = src
        .replace(tsImportRe, (_, a, b, c) => `${a}${b}.js${c}`)
        .replace(tsDynImportRe, (_, a, b, c) => `${a}${b}.js${c})`);
      if (out !== src) {
        await writeFile(file, out, "utf-8");
        rewritten++;
      }
    }),
  );
  if (rewritten > 0) {
    console.log(`Rewrote .ts → .js imports in ${rewritten} files`);
  }

  // ── 5. Remove stale compiled test files ──
  // dist-test entries whose source no longer exists in a non-integration
  // source directory (e.g. test moved to integration/). Only cleans
  // *.test.js and *.test.ts files to avoid touching non-test outputs.
  const testDirsToClean = [
    [join(ROOT, "dist-test", "src", "tests"), join(ROOT, "src", "tests")],
    [
      join(ROOT, "dist-test", "src", "resources", "extensions", "sf", "tests"),
      join(ROOT, "src", "resources", "extensions", "sf", "tests"),
    ],
  ];
  let staleCleaned = 0;
  for (const [distDir, srcTestDir] of testDirsToClean) {
    let distEntries;
    try {
      distEntries = await readdir(distDir, { withFileTypes: true });
    } catch {
      continue; // dist dir absent — nothing compiled there yet
    }
    for (const entry of distEntries) {
      if (!entry.isFile()) continue;
      if (!entry.name.match(/\.test\.(js|ts)$/)) continue;
      const stem = entry.name.replace(/\.(js|ts)$/, "");
      // Source could be .ts or .mjs (esbuild compiles both to .js)
      const hasTsSrc = existsSync(join(srcTestDir, stem + ".ts"));
      const hasMjsSrc = existsSync(join(srcTestDir, stem + ".mjs"));
      if (!hasTsSrc && !hasMjsSrc) {
        await rm(join(distDir, entry.name));
        staleCleaned++;
      }
    }
  }
  if (staleCleaned > 0) {
    console.log(
      `Removed ${staleCleaned} stale compiled test files from dist-test/`,
    );
  }

  // ── 6. Ensure dist-test/node_modules exists ──
  // resource-loader.ts (which computes packageRoot from import.meta.url)
  // resolves sfNodeModules to a real path. Without this, initResources
  // creates dangling symlinks in test environments.
  const distNodeModules = join(ROOT, "dist-test", "node_modules");
  if (!existsSync(distNodeModules)) {
    symlinkSync(join(ROOT, "node_modules"), distNodeModules);
  }

  const elapsed = ((Date.now() - start) / 1000).toFixed(2);
  console.log(`Done in ${elapsed}s`);
}
// Entry point: run the pipeline; report any failure and exit non-zero.
try {
  await main();
} catch (err) {
  console.error(err);
  process.exit(1);
}

View file

@@ -1,60 +0,0 @@
/**
* Minimal Node.js import hook for running tests from dist-test/.
*
* esbuild with bundle:false preserves import specifiers verbatim, so compiled
* .js files still import '../foo.ts'. This hook redirects those to '.js' so
* Node can find the compiled output.
*
* Also redirects @sf bare imports to their compiled counterparts in dist-test.
*/
// dist-test root — everything compiled lands under this URL prefix.
const DIST_TEST = new URL("../dist-test/", import.meta.url).href;
// Absolute URLs of compiled @singularity-forge/* entry points.
const SF_ALIASES = {
  "@singularity-forge/pi-coding-agent": new URL(
    "../dist-test/packages/pi-coding-agent/src/index.js",
    import.meta.url,
  ).href,
  "@singularity-forge/pi-ai/oauth": new URL(
    "../dist-test/packages/pi-ai/src/utils/oauth/index.js",
    import.meta.url,
  ).href,
  "@singularity-forge/pi-ai": new URL(
    "../dist-test/packages/pi-ai/src/index.js",
    import.meta.url,
  ).href,
  "@singularity-forge/pi-agent-core": new URL(
    "../dist-test/packages/pi-agent-core/src/index.js",
    import.meta.url,
  ).href,
  "@singularity-forge/pi-tui": new URL(
    "../dist-test/packages/pi-tui/src/index.js",
    import.meta.url,
  ).href,
  "@singularity-forge/native": new URL(
    "../dist-test/packages/native/src/index.js",
    import.meta.url,
  ).href,
};
/**
 * Node module-resolution hook.
 *
 * @param {string} specifier - raw import specifier being resolved
 * @param {{parentURL?: string}} context - resolution context from Node
 * @param {Function} nextResolve - next hook / default resolver in the chain
 * @returns result of nextResolve on the (possibly rewritten) specifier
 */
export function resolve(specifier, context, nextResolve) {
  // 1. @singularity-forge/* bare imports → compiled dist-test counterpart.
  // Object.hasOwn (not the `in` operator) so inherited Object.prototype
  // keys such as "toString" can never be mistaken for an alias.
  if (Object.hasOwn(SF_ALIASES, specifier)) {
    return nextResolve(SF_ALIASES[specifier], context);
  }
  // 2. Relative .ts imports inside dist-test → .js (esbuild keeps the
  // original .ts specifiers verbatim; Node can only load the compiled .js).
  if (
    specifier.endsWith(".ts") &&
    (specifier.startsWith("./") || specifier.startsWith("../")) &&
    context.parentURL &&
    context.parentURL.startsWith(DIST_TEST)
  ) {
    const jsSpecifier = specifier.slice(0, -3) + ".js";
    return nextResolve(jsSpecifier, context);
  }
  // Everything else: defer to the default resolution chain.
  return nextResolve(specifier, context);
}

View file

@@ -3,24 +3,24 @@
*
* Accumulates per-unit usage data across autonomous mode sessions.
* Data is extracted from session entries before each context wipe,
* written to .sf/metrics.json, and surfaced in the dashboard.
* written to the SF SQLite database, and surfaced in the dashboard.
*
* Data flow:
* 1. Before newSession() wipes context, snapshotUnitMetrics() scans
* session entries for AssistantMessage usage data
* 2. The unit record is appended to the in-memory ledger and flushed to disk
* 2. The unit record is upserted to unit_metrics in the project DB
* 3. The dashboard overlay and progress widget read from the in-memory ledger
* 4. On crash recovery or fresh start, the ledger is loaded from disk
* 4. On crash recovery or fresh start, the ledger is loaded from the DB
*/
import { join } from "node:path";
import {
loadJsonFile,
loadJsonFileOrNull,
saveJsonFile,
} from "./json-persistence.js";
import { formatModelIdentity } from "./model-identity.js";
import { sfRuntimeRoot } from "./paths.js";
import { getDatabase } from "./sf-db.js";
import {
getAllUnitMetrics,
getDatabase,
getProjectStartedAt,
pruneUnitMetrics,
setProjectStartedAt,
upsertUnitMetrics,
} from "./sf-db.js";
import { getAndClearSkills } from "./skill-telemetry.js";
import { parseUnitId } from "./unit-id.js";
import { buildAuditEnvelope, emitUokAuditEvent } from "./uok/audit.js";
@ -524,17 +524,6 @@ export function formatCostProjection(
return result;
}
// ─── Disk I/O ─────────────────────────────────────────────────────────────────
// Path of the legacy JSON metrics ledger: <sf runtime root>/metrics.json.
// NOTE(review): per the surrounding diff, this helper belongs to the
// removed JSON-file persistence path (superseded by the SQLite store).
function metricsPath(base) {
  return join(sfRuntimeRoot(base), "metrics.json");
}
// Type guard for the on-disk ledger shape: { version: 1, units: [...] }.
// NOTE(review): also part of the removed JSON-file persistence path.
function isMetricsLedger(data) {
  return (
    typeof data === "object" &&
    data !== null &&
    data.version === 1 &&
    Array.isArray(data.units)
  );
}
// Fresh, empty ledger; projectStartedAt defaults to the current time.
function defaultLedger() {
  return { version: 1, projectStartedAt: Date.now(), units: [] };
}
@ -542,49 +531,60 @@ function defaultLedger() {
* Prune the metrics ledger to at most `keepCount` most-recent unit entries.
*
* Called by the doctor when the ledger exceeds the bloat threshold.
* Keeps the newest entries (highest index = most recent) and discards
* the oldest from the head of the array. Preserves `projectStartedAt`.
* Keeps the newest entries and discards the oldest.
* Preserves `projectStartedAt`.
*
* Updates both the on-disk file and the in-memory ledger if it is loaded,
* Updates both the DB and the in-memory ledger if it is loaded,
* so the current session sees the pruned state immediately.
*
* @returns the number of entries removed, or 0 if no pruning was needed.
*/
// NOTE(review): this hunk renders the old (JSON-file) and new (SQLite)
// implementations interleaved without +/- markers; both write paths
// appear below and should not coexist in real source.
export function pruneMetricsLedger(base, keepCount) {
  const db = getDatabase();
  // Legacy path: load the JSON ledger, trim to keepCount, rewrite it.
  const disk = loadLedgerFromDisk(base);
  if (!disk || disk.units.length <= keepCount) return 0;
  const removed = disk.units.length - keepCount;
  disk.units = disk.units.slice(-keepCount);
  saveJsonFile(metricsPath(base), disk);
  // Keep the in-memory ledger in sync if it is loaded for this session.
  // New path: prune directly in the project DB when one is open.
  if (db) {
    pruneUnitMetrics(db, keepCount);
  }
  if (ledger) {
    ledger.units = ledger.units.slice(-keepCount);
  }
  return removed;
}
/**
* Load ledger from disk without initializing in-memory state.
* Load ledger from DB without initializing in-memory state.
* Used by history/export commands outside of autonomous mode.
*/
export function loadLedgerFromDisk(base) {
  // NOTE(review): diff residue — the legacy JSON return and the new DB
  // logic are interleaved; as rendered, everything after the first
  // return is unreachable. The DB branch is the surviving revision.
  return loadJsonFileOrNull(metricsPath(base), isMetricsLedger);
  const db = getDatabase();
  if (!db) return null;
  try {
    const units = getAllUnitMetrics(db);
    const projectStartedAt = getProjectStartedAt(db) ?? Date.now();
    return { version: 1, projectStartedAt, units };
  } catch {
    return null;
  }
}
// NOTE(review): diff residue — the old JSON-file body and the new DB body
// are interleaved without +/- markers; as rendered, the `if` opened below
// is never closed, so this block is not syntactically complete.
function loadLedger(base) {
  const raw = loadJsonFile(metricsPath(base), isMetricsLedger, defaultLedger);
  const before = raw.units.length;
  raw.units = deduplicateUnits(raw.units);
  if (raw.units.length < before) {
    // Persist the cleaned ledger so duplicates don't re-accumulate
    saveLedger(base, raw);
  const db = getDatabase();
  if (!db) return defaultLedger();
  try {
    const rawUnits = getAllUnitMetrics(db);
    const units = deduplicateUnits(rawUnits);
    const projectStartedAt = getProjectStartedAt(db) ?? Date.now();
    return { version: 1, projectStartedAt, units };
  } catch {
    return defaultLedger();
  }
  return raw;
}
/**
* Collapse duplicate entries with the same (type, id, startedAt) triple.
* Keeps the entry with the highest finishedAt (the most complete snapshot).
*
* This is a defensive measure against idle-watchdog race conditions that can
* produce duplicate entries on disk despite the in-memory idempotency guard
* produce duplicate entries despite the in-memory idempotency guard
* in snapshotUnitMetrics(). See #1943.
*/
function deduplicateUnits(units) {
@ -599,5 +599,14 @@ function deduplicateUnits(units) {
return Array.from(map.values());
}
// NOTE(review): diff residue — the legacy JSON write and the new per-unit
// DB upsert are both shown; only the DB path survives in the new revision.
function saveLedger(base, data) {
  saveJsonFile(metricsPath(base), data);
  const db = getDatabase();
  if (!db) return;
  try {
    // Upsert every unit, then persist the project start timestamp.
    for (const unit of data.units) {
      upsertUnitMetrics(db, unit);
    }
    setProjectStartedAt(db, data.projectStartedAt);
  } catch {
    /* non-fatal — DB writes are best-effort during autonomous mode */
  }
}