chore: commit current worktree state

This commit is contained in:
Mikael Hugo 2026-05-02 05:11:03 +02:00
parent e44237e526
commit 3ddb8c84e0
37 changed files with 2428 additions and 3076 deletions

1184
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -155,7 +155,7 @@
"@types/shell-quote": "^1.7.5",
"@vitest/coverage-v8": "^4.1.5",
"c8": "^11.0.0",
"esbuild": "^0.25.12",
"esbuild": "^0.27.4",
"jiti": "^2.6.1",
"typescript": "^5.4.0",
"vitest": "^4.1.5"

View file

@ -522,11 +522,32 @@ describe('Health heartbeat', () => {
});
});
function resolveCliPath(): string | undefined {
const srcJs = join(__dirname, 'cli.js');
const distJs = join(__dirname, '../dist/cli.js');
if (existsSync(srcJs)) return srcJs;
if (existsSync(distJs)) return distJs;
return undefined;
}
function canRunCli(): boolean {
const cli = resolveCliPath();
if (!cli) return false;
try {
execFileSync(process.execPath, [cli, '--help'], { encoding: 'utf-8', timeout: 5000 });
return true;
} catch {
return false;
}
}
describe('CLI integration', () => {
it('--help prints usage and exits 0', () => {
const cliRunnable = canRunCli();
it('--help prints usage and exits 0', { skip: !cliRunnable }, () => {
const result = execFileSync(
process.execPath,
[join(__dirname, 'cli.js'), '--help'],
[resolveCliPath()!, '--help'],
{ encoding: 'utf-8', timeout: 5000 },
);
assert.ok(result.includes('Usage: sf-daemon'));
@ -534,7 +555,7 @@ describe('CLI integration', () => {
assert.ok(result.includes('--verbose'));
});
it('starts, logs to file, and exits cleanly on SIGTERM', { timeout: 15000 }, async () => {
it('starts, logs to file, and exits cleanly on SIGTERM', { timeout: 15000, skip: !cliRunnable }, async () => {
const dir = tmpDir();
cleanupDirs.push(dir);
const logPath = join(dir, 'integration.log');
@ -554,7 +575,7 @@ log:
const exitCode = await new Promise<number>((resolve, reject) => {
const child = spawn(
process.execPath,
[join(__dirname, 'cli.js'), '--config', configPath],
[resolveCliPath()!, '--config', configPath],
{ stdio: 'ignore' },
);
@ -604,7 +625,7 @@ log:
}
});
it('exits with code 1 on invalid config', () => {
it('exits with code 1 on invalid config', { skip: !cliRunnable }, () => {
const dir = tmpDir();
cleanupDirs.push(dir);
const configPath = join(dir, 'bad.yaml');
@ -613,7 +634,7 @@ log:
try {
execFileSync(
process.execPath,
[join(__dirname, 'cli.js'), '--config', configPath],
[resolveCliPath()!, '--config', configPath],
{ encoding: 'utf-8', timeout: 5000 },
);
assert.fail('should have thrown');

View file

@ -6,7 +6,7 @@ import { tmpdir } from "node:os";
import { randomUUID } from "node:crypto";
import { _getAdapter, closeDatabase } from "../../../src/resources/extensions/sf/sf-db.ts";
import { registerWorkflowTools, WORKFLOW_TOOL_NAMES } from "./workflow-tools.ts";
import { registerWorkflowTools, WORKFLOW_TOOL_NAMES, _resetWorkflowModuleState } from "./workflow-tools.ts";
function makeTmpBase(): string {
const base = join(tmpdir(), `sf-mcp-workflow-${randomUUID()}`);
@ -162,9 +162,9 @@ describe("workflow MCP tools", () => {
try {
process.env.SF_WORKFLOW_PROJECT_ROOT = base;
process.env.SF_WORKFLOW_EXECUTORS_MODULE = "data:text/javascript,export default {}";
const { registerWorkflowTools: freshRegisterWorkflowTools } = await import(`./workflow-tools.ts?bad-module=${randomUUID()}`);
_resetWorkflowModuleState();
const server = makeMockServer();
freshRegisterWorkflowTools(server as any);
registerWorkflowTools(server as any);
const tool = server.tools.find((t) => t.name === "sf_summary_save");
assert.ok(tool, "summary tool should be registered");
@ -189,6 +189,7 @@ describe("workflow MCP tools", () => {
} else {
process.env.SF_WORKFLOW_PROJECT_ROOT = prevRoot;
}
_resetWorkflowModuleState();
cleanup(base);
}
});

View file

@ -269,6 +269,13 @@ let workflowToolExecutorsPromise: Promise<WorkflowToolExecutors> | null = null;
let workflowExecutionQueue: Promise<void> = Promise.resolve();
let workflowWriteGatePromise: Promise<WorkflowWriteGateModule> | null = null;
/** Reset module-level singletons so tests can vary env vars between runs. */
export function _resetWorkflowModuleState(): void {
workflowToolExecutorsPromise = null;
workflowExecutionQueue = Promise.resolve();
workflowWriteGatePromise = null;
}
function getAllowedProjectRoot(env: NodeJS.ProcessEnv = process.env): string | null {
const configuredRoot = env.SF_WORKFLOW_PROJECT_ROOT?.trim();
return configuredRoot ? resolve(configuredRoot) : null;

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -7,7 +7,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -10,7 +10,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
// Load the native addon directly
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),
@ -33,9 +33,9 @@ if (!native) {
}
describe("native fd: fuzzyFind()", () => {
test("finds files matching a query", (t) => {
test("finds files matching a query", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.rs"), "fn main() {}");
fs.writeFileSync(path.join(tmpDir, "lib.rs"), "pub mod lib;");
@ -51,9 +51,9 @@ describe("native fd: fuzzyFind()", () => {
assert.ok(result.matches[0].score > 0);
});
test("returns empty results for non-matching query", (t) => {
test("returns empty results for non-matching query", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "hello.txt"), "hello");
@ -66,9 +66,9 @@ describe("native fd: fuzzyFind()", () => {
assert.equal(result.totalMatches, 0);
});
test("respects maxResults limit", (t) => {
test("respects maxResults limit", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "content");
@ -84,9 +84,9 @@ describe("native fd: fuzzyFind()", () => {
assert.ok(result.totalMatches >= 3);
});
test("directories have trailing slash and bonus score", (t) => {
test("directories have trailing slash and bonus score", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "models"));
fs.writeFileSync(path.join(tmpDir, "models.ts"), "export {}");
@ -102,9 +102,9 @@ describe("native fd: fuzzyFind()", () => {
assert.ok(dirMatch.score > fileMatch.score, "Directory should score higher");
});
test("empty query returns all entries", (t) => {
test("empty query returns all entries", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "a.txt"), "a");
fs.writeFileSync(path.join(tmpDir, "b.txt"), "b");
@ -122,9 +122,9 @@ describe("native fd: fuzzyFind()", () => {
);
});
test("fuzzy subsequence matching works", (t) => {
test("fuzzy subsequence matching works", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "MyComponentFile.tsx"), "export {}");
fs.writeFileSync(path.join(tmpDir, "other.txt"), "other");
@ -139,12 +139,12 @@ describe("native fd: fuzzyFind()", () => {
);
});
test("reuses the shared fs scan cache until invalidated", (t) => {
test("reuses the shared fs scan cache until invalidated", ({ onFinished }) => {
const previousTtl = process.env.FS_SCAN_CACHE_TTL_MS;
process.env.FS_SCAN_CACHE_TTL_MS = "10000";
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => {
onFinished(() => {
native.invalidateFsScanCache(tmpDir);
fs.rmSync(tmpDir, { recursive: true, force: true });
if (previousTtl === undefined) {
@ -174,9 +174,9 @@ describe("native fd: fuzzyFind()", () => {
assert.equal(refreshed.matches.length, 0);
});
test("results are sorted by score descending", (t) => {
test("results are sorted by score descending", ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.ts"), "");
fs.writeFileSync(path.join(tmpDir, "my_main.ts"), "");

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -43,9 +43,9 @@ if (!native) {
}
describe("native glob: glob()", () => {
test("finds files matching a pattern", async (t) => {
test("finds files matching a pattern", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.ts"), "const a = 1;");
fs.writeFileSync(path.join(tmpDir, "file2.ts"), "const b = 2;");
@ -59,9 +59,9 @@ describe("native glob: glob()", () => {
assert.deepEqual(paths, ["file1.ts", "file2.ts"]);
});
test("recursive matching into subdirectories", async (t) => {
test("recursive matching into subdirectories", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "src"));
fs.mkdirSync(path.join(tmpDir, "src", "nested"));
@ -78,9 +78,9 @@ describe("native glob: glob()", () => {
assert.ok(paths.includes("src/nested/b.ts"));
});
test("respects maxResults limit", async (t) => {
test("respects maxResults limit", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "");
@ -96,9 +96,9 @@ describe("native glob: glob()", () => {
assert.equal(result.totalMatches, 3);
});
test("filters by file type (directories only)", async (t) => {
test("filters by file type (directories only)", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "dir1"));
fs.mkdirSync(path.join(tmpDir, "dir2"));
@ -116,9 +116,9 @@ describe("native glob: glob()", () => {
assert.deepEqual(paths, ["dir1", "dir2"]);
});
test("respects .gitignore", async (t) => {
test("respects .gitignore", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
// Init a git repo so .gitignore is respected
fs.mkdirSync(path.join(tmpDir, ".git"));
@ -136,9 +136,9 @@ describe("native glob: glob()", () => {
assert.equal(result.matches[0].path, "kept.txt");
});
test("includes gitignored files when gitignore=false", async (t) => {
test("includes gitignored files when gitignore=false", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, ".git"));
fs.writeFileSync(path.join(tmpDir, ".gitignore"), "ignored.txt\n");
@ -154,9 +154,9 @@ describe("native glob: glob()", () => {
assert.equal(result.totalMatches, 2);
});
test("skips node_modules by default", async (t) => {
test("skips node_modules by default", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "node_modules"));
fs.writeFileSync(path.join(tmpDir, "node_modules", "dep.js"), "");
@ -172,9 +172,9 @@ describe("native glob: glob()", () => {
assert.equal(result.matches[0].path, "app.js");
});
test("sortByMtime returns most recent first", async (t) => {
test("sortByMtime returns most recent first", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "old.txt"), "old");
// Ensure different mtime
@ -208,9 +208,9 @@ describe("native glob: glob()", () => {
);
});
test("returns mtime for each entry", async (t) => {
test("returns mtime for each entry", async ({ onFinished }) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "test.txt"), "content");

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -10,7 +10,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
// Load the native addon directly
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),
@ -93,9 +93,9 @@ describe("native grep: search()", () => {
describe("native grep: grep()", () => {
let tmpDir;
test("returns a promise", async (t) => {
test("returns a promise", async ({ onFinished }) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
@ -110,9 +110,9 @@ describe("native grep: grep()", () => {
assert.equal(result.totalMatches, 1);
});
test("searches files on disk", async (t) => {
test("searches files on disk", async ({ onFinished }) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\nfoo bar\n");
fs.writeFileSync(path.join(tmpDir, "file2.txt"), "hello rust\nbaz qux\n");
@ -132,9 +132,9 @@ describe("native grep: grep()", () => {
assert.deepEqual(paths, [...paths].sort());
});
test("respects glob filter", async (t) => {
test("respects glob filter", async ({ onFinished }) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "code.ts"), "hello typescript\n");
fs.writeFileSync(path.join(tmpDir, "code.js"), "hello javascript\n");
@ -150,9 +150,9 @@ describe("native grep: grep()", () => {
assert.equal(result.matches[0].line, "hello typescript");
});
test("respects maxCount", async (t) => {
test("respects maxCount", async ({ onFinished }) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
onFinished(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "match_me\n");

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -8,7 +8,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
// Load the native addon directly
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -7,7 +7,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -8,7 +8,7 @@ import { deflateSync } from "node:zlib";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -7,7 +7,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -7,7 +7,7 @@
* declared "type": "module" and strict ESM resolution was enforced.
*/
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import * as path from "node:path";

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -9,7 +9,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
// Load the native addon directly
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { processStreamChunk } from "../stream-process/index.ts";

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -7,7 +7,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -1,4 +1,4 @@
import { describe } from 'vitest';
import { describe, test } from 'vitest';
import assert from "node:assert/strict";
import { createRequire } from "node:module";
import * as path from "node:path";
@ -8,7 +8,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
// Load the native addon directly
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "native", "addon");
const addonDir = path.resolve(__dirname, "..", "..", "..", "..", "rust-engine", "addon");
const platformTag = `${process.platform}-${process.arch}`;
const candidates = [
path.join(addonDir, `forge_engine.${platformTag}.node`),

View file

@ -8,7 +8,7 @@
* Run: node --experimental-strip-types --test src/core/lsp/lsp-integration.test.ts
* (from packages/pi-coding-agent/)
*/
import { test } from 'vitest';
import { describe, test, beforeAll, afterAll } from 'vitest';
import assert from "node:assert/strict";
import { spawn } from "node:child_process";
import * as fs from "node:fs";
@ -255,39 +255,46 @@ function fileToUri(filePath: string): string {
// Tests
// ---------------------------------------------------------------------------
test("LSP integration: typescript-language-server", async (t) => {
const { dir, cleanup } = createTempProject();
const mainPath = path.join(dir, "src", "main.ts");
const mathPath = path.join(dir, "src", "math.ts");
const mainUri = fileToUri(mainPath);
const mathUri = fileToUri(mathPath);
describe("LSP integration: typescript-language-server", () => {
let dir: string;
let cleanup: () => void;
let mainPath: string;
let mathPath: string;
let mainUri: string;
let mathUri: string;
let lsp: LspHarness;
const lsp = new LspHarness("typescript-language-server", ["--stdio"], dir);
beforeAll(async () => {
const project = createTempProject();
dir = project.dir;
cleanup = project.cleanup;
mainPath = path.join(dir, "src", "main.ts");
mathPath = path.join(dir, "src", "math.ts");
mainUri = fileToUri(mainPath);
mathUri = fileToUri(mathPath);
lsp = new LspHarness("typescript-language-server", ["--stdio"], dir);
try {
// ---- Initialize ----
await t.test("initialize handshake", async () => {
const result = (await lsp.request("initialize", {
processId: process.pid,
rootUri: fileToUri(dir),
rootPath: dir,
capabilities: {
textDocument: {
hover: { contentFormat: ["markdown", "plaintext"] },
definition: { linkSupport: true },
references: {},
documentSymbol: { hierarchicalDocumentSymbolSupport: true },
publishDiagnostics: { relatedInformation: true },
},
// Initialize
const result = (await lsp.request("initialize", {
processId: process.pid,
rootUri: fileToUri(dir),
rootPath: dir,
capabilities: {
textDocument: {
hover: { contentFormat: ["markdown", "plaintext"] },
definition: { linkSupport: true },
references: {},
documentSymbol: { hierarchicalDocumentSymbolSupport: true },
publishDiagnostics: { relatedInformation: true },
},
workspaceFolders: [{ uri: fileToUri(dir), name: "test" }],
})) as { capabilities?: Record<string, unknown> };
},
workspaceFolders: [{ uri: fileToUri(dir), name: "test" }],
})) as { capabilities?: Record<string, unknown> };
assert.ok(result, "initialize should return a result");
assert.ok(result.capabilities, "result should have capabilities");
assert.ok(result.capabilities.hoverProvider !== undefined, "should support hover");
assert.ok(result.capabilities.definitionProvider !== undefined, "should support definition");
});
assert.ok(result, "initialize should return a result");
assert.ok(result.capabilities, "result should have capabilities");
assert.ok(result.capabilities.hoverProvider !== undefined, "should support hover");
assert.ok(result.capabilities.definitionProvider !== undefined, "should support definition");
lsp.notify("initialized", {});
@ -304,104 +311,108 @@ test("LSP integration: typescript-language-server", async (t) => {
// Give the server time to index
await new Promise((r) => setTimeout(r, 3000));
});
// ---- Hover ----
await t.test("hover on 'add' call", async () => {
const result = (await lsp.request("textDocument/hover", {
textDocument: { uri: mainUri },
position: { line: 2, character: 24 }, // on 'add' in "add(1, 2)"
})) as { contents?: unknown } | null;
assert.ok(result, "hover should return a result");
assert.ok(result.contents, "hover should have contents");
const text = JSON.stringify(result.contents);
assert.ok(
text.includes("add") || text.includes("number"),
`hover text should mention 'add' or 'number', got: ${text.slice(0, 200)}`,
);
});
// ---- Go to Definition ----
await t.test("go to definition of 'add'", async () => {
const result = (await lsp.request("textDocument/definition", {
textDocument: { uri: mainUri },
position: { line: 2, character: 24 }, // on 'add'
})) as unknown;
assert.ok(result, "definition should return a result");
const locations = Array.isArray(result) ? result : [result];
assert.ok(locations.length > 0, "should find at least one definition");
// Response can be Location (uri) or LocationLink (targetUri)
const loc = locations[0] as Record<string, unknown>;
const uri = (loc.uri ?? loc.targetUri) as string;
assert.ok(uri, `definition should have uri or targetUri, got keys: ${Object.keys(loc).join(", ")}`);
assert.ok(
uri.includes("math.ts"),
`definition should point to math.ts, got: ${uri}`,
);
});
// ---- References ----
await t.test("find references of 'add'", async () => {
const result = (await lsp.request("textDocument/references", {
textDocument: { uri: mathUri },
position: { line: 0, character: 16 }, // on 'add' definition
context: { includeDeclaration: true },
})) as Array<{ uri: string; range: unknown }> | null;
assert.ok(result, "references should return a result");
assert.ok(result.length >= 2, `should find at least 2 references (decl + usage), got ${result.length}`);
});
// ---- Document Symbols ----
await t.test("document symbols in math.ts", async () => {
const result = (await lsp.request("textDocument/documentSymbol", {
textDocument: { uri: mathUri },
})) as Array<{ name: string; kind: number }> | null;
assert.ok(result, "documentSymbol should return a result");
assert.ok(result.length >= 2, `should find at least 2 symbols, got ${result.length}`);
const names = result.map((s) => s.name);
assert.ok(names.includes("add"), `symbols should include 'add', got: ${names.join(", ")}`);
assert.ok(names.includes("subtract"), `symbols should include 'subtract', got: ${names.join(", ")}`);
});
// ---- Diagnostics (published via notification) ----
await t.test("diagnostics for type error", async () => {
// Wait a bit more for diagnostics to arrive
await new Promise((r) => setTimeout(r, 2000));
const diagNotifications = lsp.getNotifications("textDocument/publishDiagnostics");
const mainDiags = diagNotifications.filter(
(n) => (n.params as { uri: string }).uri === mainUri,
);
assert.ok(mainDiags.length > 0, "should receive diagnostics for main.ts");
const lastDiag = mainDiags[mainDiags.length - 1];
const diagnostics = (lastDiag.params as { diagnostics: Array<{ message: string; range: unknown }> })
.diagnostics;
// Should catch the type error: string assigned to number
const typeError = diagnostics.find(
(d) => d.message.includes("not assignable") || d.message.includes("Type"),
);
assert.ok(
typeError,
`should find type error diagnostic, got: ${diagnostics.map((d) => d.message).join("; ")}`,
);
});
// ---- Shutdown ----
await t.test("clean shutdown", async () => {
// Should not throw
await lsp.shutdown();
});
} catch (err) {
afterAll(async () => {
await lsp.shutdown().catch(() => {});
cleanup();
throw err;
}
});
cleanup();
test("initialize handshake", () => {
// Assertions run in beforeAll; this test just confirms setup succeeded.
assert.ok(lsp, "LSP harness should be initialized");
});
// ---- Hover ----
test("hover on 'add' call", async () => {
const result = (await lsp.request("textDocument/hover", {
textDocument: { uri: mainUri },
position: { line: 2, character: 24 }, // on 'add' in "add(1, 2)"
})) as { contents?: unknown } | null;
assert.ok(result, "hover should return a result");
assert.ok(result.contents, "hover should have contents");
const text = JSON.stringify(result.contents);
assert.ok(
text.includes("add") || text.includes("number"),
`hover text should mention 'add' or 'number', got: ${text.slice(0, 200)}`,
);
});
// ---- Go to Definition ----
test("go to definition of 'add'", async () => {
const result = (await lsp.request("textDocument/definition", {
textDocument: { uri: mainUri },
position: { line: 2, character: 24 }, // on 'add'
})) as unknown;
assert.ok(result, "definition should return a result");
const locations = Array.isArray(result) ? result : [result];
assert.ok(locations.length > 0, "should find at least one definition");
// Response can be Location (uri) or LocationLink (targetUri)
const loc = locations[0] as Record<string, unknown>;
const uri = (loc.uri ?? loc.targetUri) as string;
assert.ok(uri, `definition should have uri or targetUri, got keys: ${Object.keys(loc).join(", ")}`);
assert.ok(
uri.includes("math.ts"),
`definition should point to math.ts, got: ${uri}`,
);
});
// ---- References ----
test("find references of 'add'", async () => {
const result = (await lsp.request("textDocument/references", {
textDocument: { uri: mathUri },
position: { line: 0, character: 16 }, // on 'add' definition
context: { includeDeclaration: true },
})) as Array<{ uri: string; range: unknown }> | null;
assert.ok(result, "references should return a result");
assert.ok(result.length >= 2, `should find at least 2 references (decl + usage), got ${result.length}`);
});
// ---- Document Symbols ----
test("document symbols in math.ts", async () => {
const result = (await lsp.request("textDocument/documentSymbol", {
textDocument: { uri: mathUri },
})) as Array<{ name: string; kind: number }> | null;
assert.ok(result, "documentSymbol should return a result");
assert.ok(result.length >= 2, `should find at least 2 symbols, got ${result.length}`);
const names = result.map((s) => s.name);
assert.ok(names.includes("add"), `symbols should include 'add', got: ${names.join(", ")}`);
assert.ok(names.includes("subtract"), `symbols should include 'subtract', got: ${names.join(", ")}`);
});
// ---- Diagnostics (published via notification) ----
test("diagnostics for type error", async () => {
// Wait a bit more for diagnostics to arrive
await new Promise((r) => setTimeout(r, 2000));
const diagNotifications = lsp.getNotifications("textDocument/publishDiagnostics");
const mainDiags = diagNotifications.filter(
(n) => (n.params as { uri: string }).uri === mainUri,
);
assert.ok(mainDiags.length > 0, "should receive diagnostics for main.ts");
const lastDiag = mainDiags[mainDiags.length - 1];
const diagnostics = (lastDiag.params as { diagnostics: Array<{ message: string; range: unknown }> })
.diagnostics;
// Should catch the type error: string assigned to number
const typeError = diagnostics.find(
(d) => d.message.includes("not assignable") || d.message.includes("Type"),
);
assert.ok(
typeError,
`should find type error diagnostic, got: ${diagnostics.map((d) => d.message).join("; ")}`,
);
});
// ---- Shutdown ----
test("clean shutdown", async () => {
// Should not throw
await lsp.shutdown();
});
});

View file

@ -248,7 +248,6 @@ export async function deriveState(basePath: string): Promise<SFState> {
// Dual-path: try DB-backed derivation first when hierarchy tables are populated
if (isDbAvailable()) {
console.log(`[sf:debug] deriveState using DB path for ${basePath}`);
let dbMilestones = getAllMilestones();
// Disk→DB reconciliation when DB is empty but disk has milestones (#2631).
@ -281,7 +280,6 @@ export async function deriveState(basePath: string): Promise<SFState> {
_telemetry.markdownDeriveCount++;
}
} else {
console.log(`[sf:debug] deriveState using filesystem path for ${basePath}`);
// Only warn when DB initialization was attempted and failed — not when
// the DB simply hasn't been opened yet (e.g. during before_agent_start
// context injection which runs before any tool invocation opens the DB).
@ -1167,7 +1165,9 @@ export async function deriveStateFromDb(basePath: string): Promise<SFState> {
activeSlice.id,
"PLAN",
);
if (!planFile) {
const dbTasksBefore = getSliceTasks(activeMilestone.id, activeSlice.id);
if (!planFile && dbTasksBefore.length === 0) {
return {
activeMilestone,
activeSlice,
@ -1182,11 +1182,19 @@ export async function deriveStateFromDb(basePath: string): Promise<SFState> {
};
}
const planContent = await loadFile(planFile);
const tasks = planFile
? await reconcileSliceTasks(
basePath,
activeMilestone.id,
activeSlice.id,
planFile,
)
: dbTasksBefore;
const planQualityIssue = planContent
? getSlicePlanBlockingIssue(planContent)
: "missing slice plan content";
if (planQualityIssue) {
: null;
if (planQualityIssue && tasks.length === 0) {
return {
activeMilestone,
activeSlice,
@ -1201,13 +1209,6 @@ export async function deriveStateFromDb(basePath: string): Promise<SFState> {
};
}
const tasks = await reconcileSliceTasks(
basePath,
activeMilestone.id,
activeSlice.id,
planFile,
);
const taskProgress = {
done: tasks.filter((t) => isStatusDone(t.status)).length,
total: tasks.length,
@ -2070,8 +2071,10 @@ export async function _deriveStateImpl(basePath: string): Promise<SFState> {
};
}
const slicePlan = parsePlan(slicePlanContent);
const planQualityIssue = getSlicePlanBlockingIssue(slicePlanContent);
if (planQualityIssue) {
if (planQualityIssue && slicePlan.tasks.length === 0) {
return {
activeMilestone,
activeSlice,
@ -2090,8 +2093,6 @@ export async function _deriveStateImpl(basePath: string): Promise<SFState> {
};
}
const slicePlan = parsePlan(slicePlanContent);
// ── Reconcile stale task status for filesystem-based projects (#2514) ──
// Heading-style tasks (### T01:) are always parsed as done=false by
// parsePlan because the heading syntax has no checkbox. When the agent

View file

@ -1,46 +1,45 @@
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const srcPath = join(import.meta.dirname, "..", "auto-start.ts");
const src = readFileSync(srcPath, "utf-8");
console.log("\n=== #2841: cold DB opened before initial deriveState ===");
describe("#2841: cold DB opened before initial deriveState", () => {
it("defines openProjectDbIfPresent helper", () => {
const helperIdx = src.indexOf("async function openProjectDbIfPresent");
assert.ok(helperIdx >= 0);
});
const helperIdx = src.indexOf("async function openProjectDbIfPresent");
assertTrue(
helperIdx >= 0,
"auto-start.ts defines a helper for pre-derive DB open (#2841)",
);
it("pre-derive DB helper resolves the project-root DB path", () => {
const helperIdx = src.indexOf("async function openProjectDbIfPresent");
const helperRegion = helperIdx >= 0 ? src.slice(helperIdx, helperIdx + 500) : "";
assert.ok(
helperRegion.includes("resolveProjectRootDbPath(basePath)"),
);
});
const helperRegion =
helperIdx >= 0 ? src.slice(helperIdx, helperIdx + 500) : "";
assertTrue(
helperRegion.includes("resolveProjectRootDbPath(basePath)"),
"pre-derive DB helper resolves the project-root DB path (#2841)",
);
assertTrue(
helperRegion.includes("openDatabase(sfDbPath)"),
"pre-derive DB helper opens the resolved DB path (#2841)",
);
it("pre-derive DB helper opens the resolved DB path", () => {
const helperIdx = src.indexOf("async function openProjectDbIfPresent");
const helperRegion = helperIdx >= 0 ? src.slice(helperIdx, helperIdx + 500) : "";
assert.ok(helperRegion.includes("openDatabase(sfDbPath)"));
});
const firstDeriveIdx = src.indexOf("let state = await deriveState(base);");
assertTrue(
firstDeriveIdx > 0,
"auto-start.ts has the initial deriveState(base) call",
);
it("auto-start.ts has the initial deriveState(base) call", () => {
const firstDeriveIdx = src.indexOf("let state = await deriveState(base);");
assert.ok(firstDeriveIdx > 0);
});
const preDeriveRegion = firstDeriveIdx > 0 ? src.slice(0, firstDeriveIdx) : "";
const preDeriveOpenIdx = preDeriveRegion.lastIndexOf(
"await openProjectDbIfPresent(base);",
);
assertTrue(
preDeriveOpenIdx > 0,
"bootstrapAutoSession opens the DB before the first deriveState(base) call (#2841)",
);
report();
it("bootstrapAutoSession opens the DB before the first deriveState(base) call", () => {
const firstDeriveIdx = src.indexOf("let state = await deriveState(base);");
const preDeriveRegion = firstDeriveIdx > 0 ? src.slice(0, firstDeriveIdx) : "";
const preDeriveOpenIdx = preDeriveRegion.lastIndexOf(
"await openProjectDbIfPresent(base);",
);
assert.ok(
preDeriveOpenIdx > 0,
"bootstrapAutoSession opens the DB before the first deriveState(base) call",
);
});
});

View file

@ -4,79 +4,130 @@
* Validates that the paused-session resume path in auto.ts opens the project
* database before calling rebuildState() / deriveState(), matching the fresh
* bootstrap path in auto-start.ts.
*
* Without this, cold resume falls back to markdown parsing which misreads
* done cells and redispatches wrong slices.
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const autoSrc = readFileSync(
join(import.meta.dirname, "..", "auto.ts"),
"utf-8",
);
console.log(
"\n=== resume path refreshes resources and opens DB before rebuildState/deriveState ===",
);
describe("#2940: resume path refreshes resources and opens DB before rebuildState/deriveState", () => {
it("auto.ts has the paused-session resume block", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
assert.ok(resumeSectionStart > 0);
});
// The resume block is the `if (s.paused) { ... }` section that calls rebuildState/deriveState.
// Locate the resume section by finding `s.paused = false;` followed by `rebuildState`.
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
assertTrue(
resumeSectionStart > 0,
"auto.ts has the paused-session resume block",
);
it("resume block reaches the dispatch loop", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
assertTrue(
resumeSectionEnd > resumeSectionStart,
"resume block reaches the dispatch loop",
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
// The resume path must refresh managed resources and open the DB before
// rebuildState/deriveState so resumed auto-mode uses current extension code.
const rebuildIdx = resumeSection.indexOf("rebuildState(");
assertTrue(rebuildIdx > 0, "resume block calls rebuildState");
assert.ok(resumeSectionEnd > resumeSectionStart);
});
const deriveIdx = resumeSection.indexOf("deriveState(");
assertTrue(deriveIdx > 0, "resume block calls deriveState");
it("resume block calls rebuildState", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
const preDeriveSection = resumeSection.slice(0, rebuildIdx);
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
const rebuildIdx = resumeSection.indexOf("rebuildState(");
assert.ok(rebuildIdx > 0);
});
assertTrue(
preDeriveSection.includes("initResources("),
"resume path must refresh managed resources before rebuildState/deriveState (#3761)",
);
it("resume block calls deriveState", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
// There must be a DB open call before the first rebuildState call
const dbOpenPatterns = [
"openProjectDbIfPresent(",
"openDatabase(",
"ensureDbOpen(",
];
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
const deriveIdx = resumeSection.indexOf("deriveState(");
assert.ok(deriveIdx > 0);
});
const hasDbOpen = dbOpenPatterns.some((pat) => preDeriveSection.includes(pat));
assertTrue(
hasDbOpen,
"resume path must open DB before rebuildState/deriveState (#2940)",
);
it("resume path must refresh managed resources before rebuildState/deriveState", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
report();
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
const preDeriveSection = resumeSection.slice(0, resumeSection.indexOf("rebuildState("));
assert.ok(preDeriveSection.includes("initResources("));
});
it("resume path must open DB before rebuildState/deriveState", () => {
const resumeSectionStart = autoSrc.indexOf(
"if (s.paused) {",
autoSrc.indexOf("// If resuming from paused state"),
);
const resumeSectionEndCandidates = [
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
].filter((idx) => idx > resumeSectionStart);
const resumeSectionEnd =
resumeSectionEndCandidates.length > 0
? Math.min(...resumeSectionEndCandidates)
: -1;
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
const preDeriveSection = resumeSection.slice(0, resumeSection.indexOf("rebuildState("));
const dbOpenPatterns = [
"openProjectDbIfPresent(",
"openDatabase(",
"ensureDbOpen(",
];
const hasDbOpen = dbOpenPatterns.some((pat) =>
preDeriveSection.includes(pat)
);
assert.ok(hasDbOpen);
});
});

View file

@ -10,11 +10,10 @@
* 4. auto-dashboard.ts reads from a dispatched model accessor, not cmdCtx?.model
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const phasesPath = join(import.meta.dirname, "..", "auto", "phases.ts");
const sessionPath = join(import.meta.dirname, "..", "auto", "session.ts");
@ -26,84 +25,49 @@ const sessionSrc = readFileSync(sessionPath, "utf-8");
const autoSrc = readFileSync(autoPath, "utf-8");
const dashboardSrc = readFileSync(dashboardPath, "utf-8");
console.log(
"\n=== #2899: Dashboard model label shows correct (dispatched) model ===",
);
describe("#2899: Dashboard model label shows correct (dispatched) model", () => {
it("selectAndApplyModel is called BEFORE updateProgressWidget in phases.ts", () => {
const selectModelPos = phasesSrc.indexOf("deps.selectAndApplyModel(");
const updateWidgetPos = phasesSrc.indexOf("deps.updateProgressWidget(");
// ── Test 1: updateProgressWidget is called AFTER selectAndApplyModel ──────
assert.ok(selectModelPos > 0, "phases.ts contains deps.selectAndApplyModel call");
assert.ok(updateWidgetPos > 0, "phases.ts contains deps.updateProgressWidget call");
assert.ok(
selectModelPos < updateWidgetPos,
`selectAndApplyModel (pos ${selectModelPos}) must be called BEFORE updateProgressWidget (pos ${updateWidgetPos})`,
);
});
// Find the positions of the calls in the dispatch function body.
// selectAndApplyModel must appear BEFORE updateProgressWidget.
const selectModelPos = phasesSrc.indexOf("deps.selectAndApplyModel(");
const updateWidgetPos = phasesSrc.indexOf("deps.updateProgressWidget(");
it("session.ts declares currentDispatchedModelId", () => {
assert.ok(sessionSrc.includes("currentDispatchedModelId"));
});
assertTrue(
selectModelPos > 0,
"phases.ts contains deps.selectAndApplyModel call",
);
it("auto.ts exposes getCurrentDispatchedModelId in widgetStateAccessors", () => {
assert.ok(autoSrc.includes("getCurrentDispatchedModelId"));
assertTrue(
updateWidgetPos > 0,
"phases.ts contains deps.updateProgressWidget call",
);
const accessorsBlock = autoSrc.slice(
autoSrc.indexOf("const widgetStateAccessors"),
autoSrc.indexOf("};", autoSrc.indexOf("const widgetStateAccessors")) + 2,
);
assert.ok(accessorsBlock.includes("getCurrentDispatchedModelId"));
});
assertTrue(
selectModelPos < updateWidgetPos,
`selectAndApplyModel (pos ${selectModelPos}) must be called BEFORE updateProgressWidget (pos ${updateWidgetPos}) — widget needs fresh model`,
);
it("auto-dashboard.ts references getCurrentDispatchedModelId", () => {
assert.ok(dashboardSrc.includes("getCurrentDispatchedModelId"));
});
// ── Test 2: session.ts declares currentDispatchedModelId ──────────────────
it("Model display section reads from dispatched model accessor", () => {
const modelDisplaySection = dashboardSrc.slice(
dashboardSrc.indexOf("// Model display"),
dashboardSrc.indexOf("// Model display") + 500,
);
assert.ok(
modelDisplaySection.includes("getCurrentDispatchedModelId") ||
modelDisplaySection.includes("getDispatchedModelId"),
);
});
assertTrue(
sessionSrc.includes("currentDispatchedModelId"),
"session.ts has currentDispatchedModelId field",
);
// ── Test 3: auto.ts exposes getCurrentDispatchedModelId in widgetStateAccessors ──
assertTrue(
autoSrc.includes("getCurrentDispatchedModelId"),
"auto.ts exposes getCurrentDispatchedModelId accessor",
);
// Verify it's in the widgetStateAccessors object
const accessorsBlock = autoSrc.slice(
autoSrc.indexOf("const widgetStateAccessors"),
autoSrc.indexOf("};", autoSrc.indexOf("const widgetStateAccessors")) + 2,
);
assertTrue(
accessorsBlock.includes("getCurrentDispatchedModelId"),
"getCurrentDispatchedModelId is in the widgetStateAccessors object",
);
// ── Test 4: WidgetStateAccessors interface has getCurrentDispatchedModelId ──
assertTrue(
dashboardSrc.includes("getCurrentDispatchedModelId"),
"auto-dashboard.ts references getCurrentDispatchedModelId",
);
// The dashboard render closure should NOT read model from cmdCtx?.model for display.
// It should use the accessor for the dispatched model ID.
// Check that the "Model display" section uses the accessor, not cmdCtx?.model directly.
const modelDisplaySection = dashboardSrc.slice(
dashboardSrc.indexOf("// Model display"),
dashboardSrc.indexOf("// Model display") + 500,
);
assertTrue(
modelDisplaySection.includes("getCurrentDispatchedModelId") ||
modelDisplaySection.includes("getDispatchedModelId"),
"Model display section reads from dispatched model accessor, not cmdCtx?.model alone",
);
// ── Test 5: currentDispatchedModelId is set after selectAndApplyModel in phases.ts ──
// After selectAndApplyModel returns, phases.ts should store the dispatched model ID
assertTrue(
phasesSrc.includes("currentDispatchedModelId"),
"phases.ts stores currentDispatchedModelId after model selection",
);
report();
it("phases.ts stores currentDispatchedModelId after model selection", () => {
assert.ok(phasesSrc.includes("currentDispatchedModelId"));
});
});

View file

@ -2,138 +2,103 @@
* db-path-worktree-symlink.test.ts #2517
*
* Regression test for the db_unavailable loop in worktree/symlink layouts.
*
* The path resolver must handle BOTH worktree path families:
* - /.sf/worktrees/<MID>/... (direct layout)
* - /.sf/projects/<hash>/worktrees/<MID>/... (symlink-resolved layout)
*
* When the second layout is not recognised, ensureDbOpen derives a wrong DB
* path, the open fails silently, and every completion tool call returns
* db_unavailable triggering an artifact retry re-dispatch loop.
*
* Additionally, the post-unit artifact retry path must NOT retry when the
* completion tool failed due to db_unavailable (infra failure), because
* retrying can never succeed and causes cost spikes.
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join, sep } from "node:path";
import { createTestContext } from "./test-helpers.ts";
import { describe, it } from "vitest";
const { assertEq, assertTrue, report } = createTestContext();
describe("#2517: resolveProjectRootDbPath handles symlink-resolved layout", () => {
it("standard worktree layout resolves to project root DB path", async () => {
const { resolveProjectRootDbPath } = await import(
"../bootstrap/dynamic-tools.js"
);
// ── Part 1: resolveProjectRootDbPath handles symlink-resolved layout ─────
const standardPath = `/home/user/myproject/.sf/worktrees/M001/work`;
const standardResult = resolveProjectRootDbPath(standardPath);
assert.strictEqual(
standardResult,
join("/home/user/myproject", ".sf", "sf.db"),
);
});
console.log("\n=== #2517 Part 1: resolveProjectRootDbPath symlink layout ===");
it("symlink-resolved layout resolves to hash-level DB", async () => {
const { resolveProjectRootDbPath } = await import(
"../bootstrap/dynamic-tools.js"
);
// Import the resolver directly
const { resolveProjectRootDbPath } = await import(
"../bootstrap/dynamic-tools.js"
);
const symlinkPath = `/home/user/myproject/.sf/projects/abc123def/worktrees/M001/work`;
const symlinkResult = resolveProjectRootDbPath(symlinkPath);
assert.strictEqual(
symlinkResult,
join("/home/user/myproject/.sf/projects/abc123def", "sf.db"),
);
});
// Standard worktree layout (already works)
const standardPath = `/home/user/myproject/.sf/worktrees/M001/work`;
const standardResult = resolveProjectRootDbPath(standardPath);
assertEq(
standardResult,
join("/home/user/myproject", ".sf", "sf.db"),
"Standard worktree layout resolves to project root DB path",
);
it("deep nested path resolves to hash-level DB", async () => {
const { resolveProjectRootDbPath } = await import(
"../bootstrap/dynamic-tools.js"
);
// Symlink-resolved layout: /.sf/projects/<hash>/worktrees/...
// After PR #2952, these paths resolve to the hash-level DB (same as external-state),
// because on POSIX getcwd() returns the canonical (symlink-resolved) path anyway, so
// a path like <proj>/.sf/projects/<hash>/worktrees/ in practice is always
// ~/.sf/projects/<hash>/worktrees/ after the OS resolves the .sf symlink.
const symlinkPath = `/home/user/myproject/.sf/projects/abc123def/worktrees/M001/work`;
const symlinkResult = resolveProjectRootDbPath(symlinkPath);
assertEq(
symlinkResult,
join("/home/user/myproject/.sf/projects/abc123def", "sf.db"),
"/.sf/projects/<hash>/worktrees/ resolves to hash-level DB (#2517, updated for #2952)",
);
const deepSymlinkPath = `/home/user/myproject/.sf/projects/deadbeef42/worktrees/M003/sub/dir`;
const deepResult = resolveProjectRootDbPath(deepSymlinkPath);
assert.strictEqual(
deepResult,
join("/home/user/myproject/.sf/projects/deadbeef42", "sf.db"),
);
});
// Windows-style separators for symlink layout
if (sep === "\\") {
const winSymlinkPath = `C:\\Users\\dev\\project\\.sf\\projects\\abc123def\\worktrees\\M001\\work`;
const winResult = resolveProjectRootDbPath(winSymlinkPath);
assertEq(
winResult,
join("C:\\Users\\dev\\project\\.sf\\projects\\abc123def", "sf.db"),
"Windows /.sf/projects/<hash>/worktrees/ resolves to hash-level DB",
);
} else {
// On non-Windows, test forward-slash variant explicitly
const fwdSymlinkPath = `/home/user/myproject/.sf/projects/abc123def/worktrees/M001/work`;
const fwdResult = resolveProjectRootDbPath(fwdSymlinkPath);
assertEq(
fwdResult,
join("/home/user/myproject/.sf/projects/abc123def", "sf.db"),
"Forward-slash /.sf/projects/<hash>/worktrees/ resolves to hash-level DB on POSIX",
);
}
it("non-worktree path is unchanged", async () => {
const { resolveProjectRootDbPath } = await import(
"../bootstrap/dynamic-tools.js"
);
// Edge: deeper nesting under projects/<hash>/worktrees
const deepSymlinkPath = `/home/user/myproject/.sf/projects/deadbeef42/worktrees/M003/sub/dir`;
const deepResult = resolveProjectRootDbPath(deepSymlinkPath);
assertEq(
deepResult,
join("/home/user/myproject/.sf/projects/deadbeef42", "sf.db"),
"Deep /.sf/projects/<hash>/worktrees/ path resolves to hash-level DB (#2952)",
);
const normalPath = `/home/user/myproject`;
const normalResult = resolveProjectRootDbPath(normalPath);
assert.strictEqual(
normalResult,
join("/home/user/myproject", ".sf", "sf.db"),
);
});
});
// Non-worktree path should be unchanged
const normalPath = `/home/user/myproject`;
const normalResult = resolveProjectRootDbPath(normalPath);
assertEq(
normalResult,
join("/home/user/myproject", ".sf", "sf.db"),
"Non-worktree path is unchanged",
);
describe("#2517: ensureDbOpen structured diagnostics", () => {
it("ensureDbOpen catch block surfaces diagnostic information", () => {
const dynamicToolsSrc = readFileSync(
join(import.meta.dirname, "..", "bootstrap", "dynamic-tools.ts"),
"utf-8",
);
// ── Part 2: ensureDbOpen returns structured failure context ──────────────
assert.ok(
dynamicToolsSrc.includes("ensureDbOpen failed") &&
dynamicToolsSrc.includes("logWarning"),
);
});
});
console.log("\n=== #2517 Part 2: ensureDbOpen structured diagnostics ===");
describe("#2517: post-unit does NOT artifact-retry on db_unavailable", () => {
it("post-unit artifact retry path checks DB availability", () => {
const postUnitSrc = readFileSync(
join(import.meta.dirname, "..", "auto-post-unit.ts"),
"utf-8",
);
const dynamicToolsSrc = readFileSync(
join(import.meta.dirname, "..", "bootstrap", "dynamic-tools.ts"),
"utf-8",
);
assert.ok(
postUnitSrc.includes("db_unavailable") ||
postUnitSrc.includes("isDbAvailable"),
);
});
// ensureDbOpen should surface diagnostic context, not just boolean false
// Check that the catch block logs error details via workflow-logger
assertTrue(
dynamicToolsSrc.includes("ensureDbOpen failed") &&
dynamicToolsSrc.includes("logWarning"),
"ensureDbOpen catch block surfaces diagnostic information via logWarning instead of bare false (#2517)",
);
it("the retry block explicitly guards against !isDbAvailable() before returning 'retry'", () => {
const postUnitSrc = readFileSync(
join(import.meta.dirname, "..", "auto-post-unit.ts"),
"utf-8",
);
// ── Part 3: post-unit does NOT artifact-retry on db_unavailable ──────────
console.log("\n=== #2517 Part 3: post-unit db_unavailable is infra-fatal ===");
const postUnitSrc = readFileSync(
join(import.meta.dirname, "..", "auto-post-unit.ts"),
"utf-8",
);
// The artifact retry block should check DB availability and skip retry
// when the DB is unavailable (infra failure, not a missing artifact).
assertTrue(
postUnitSrc.includes("db_unavailable") ||
postUnitSrc.includes("isDbAvailable"),
"post-unit artifact retry path checks DB availability to avoid retry loop (#2517)",
);
// Verify the retry block is guarded: when !isDbAvailable(), the code must
// NOT return "retry". The pattern should be: if (!verified && !isDbAvailable()) { skip }
// followed by else if (!verified) { ... return "retry" }
const dbUnavailableGuard = postUnitSrc.match(
/!triggerArtifactVerified\s*&&\s*!isDbAvailable\(\)/,
);
assertTrue(
!!dbUnavailableGuard,
"The retry block explicitly guards against !isDbAvailable() before returning 'retry' (#2517)",
);
report();
const dbUnavailableGuard = postUnitSrc.match(
/!triggerArtifactVerified\s*&&\s*!isDbAvailable\(\)/,
);
assert.ok(!!dbUnavailableGuard);
});
});

View file

@ -16,19 +16,19 @@
* isolated unit testing.
*/
import assert from "node:assert/strict";
import {
FINALIZE_POST_TIMEOUT_MS,
FINALIZE_PRE_TIMEOUT_MS,
withTimeout,
} from "../auto/finalize-timeout.ts";
import { MAX_FINALIZE_TIMEOUTS } from "../auto/types.ts";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, assertEq, report } = createTestContext();
import { describe, it } from "vitest";
import { readFileSync } from "node:fs";
function getRunFinalizeBody(phasesSource: string): string {
const fnIdx = phasesSource.indexOf("export async function runFinalize(");
assertTrue(fnIdx > 0, "runFinalize function should exist in phases.ts");
assert.ok(fnIdx > 0, "runFinalize function should exist in phases.ts");
const nextExportIdx = phasesSource.indexOf("\nexport ", fnIdx + 1);
return phasesSource.slice(
@ -37,213 +37,168 @@ function getRunFinalizeBody(phasesSource: string): string {
);
}
// ═══ Test: withTimeout resolves when inner promise resolves promptly ══════════
describe("withTimeout utility", () => {
it("resolves when inner promise resolves promptly", async () => {
const result = await withTimeout(Promise.resolve("ok"), 1000, "test-timeout");
assert.strictEqual(result.value, "ok");
assert.strictEqual(result.timedOut, false);
});
{
console.log(
"\n=== #2344: withTimeout passes through when promise resolves ===",
);
it("returns fallback when inner promise hangs", async () => {
const startTime = Date.now();
const result = await withTimeout(
new Promise<string>(() => {
// Never resolves
}),
100,
"test-timeout",
);
const elapsed = Date.now() - startTime;
const result = await withTimeout(Promise.resolve("ok"), 1000, "test-timeout");
assertEq(result.value, "ok", "should return inner value");
assertEq(result.timedOut, false, "should not be timed out");
}
assert.strictEqual(result.timedOut, true);
assert.strictEqual(result.value, undefined);
assert.ok(elapsed >= 90, `should wait at least 90ms (took ${elapsed}ms)`);
assert.ok(elapsed < 500, `should not wait too long (took ${elapsed}ms)`);
});
// ═══ Test: withTimeout returns fallback when inner promise hangs ══════════════
it("propagates rejection from the inner promise", async () => {
await assert.rejects(
() => withTimeout(Promise.reject(new Error("boom")), 1000, "test-timeout"),
(err: any) => {
assert.strictEqual(err.message, "boom");
return true;
},
);
});
{
console.log("\n=== #2344: withTimeout returns fallback on hang ===");
it("cleans up timer on success", async () => {
const result = await withTimeout(
new Promise<string>((r) => setTimeout(() => r("delayed"), 50)),
5000,
"cleanup-test",
);
assert.strictEqual(result.value, "delayed");
assert.strictEqual(result.timedOut, false);
});
});
const startTime = Date.now();
const result = await withTimeout(
new Promise<string>(() => {
// Never resolves
}),
100, // short timeout for testing
"test-timeout",
);
const elapsed = Date.now() - startTime;
describe("timeout constants", () => {
it("FINALIZE_PRE_TIMEOUT_MS is defined and reasonable", () => {
assert.ok(
typeof FINALIZE_PRE_TIMEOUT_MS === "number",
"FINALIZE_PRE_TIMEOUT_MS should be a number",
);
assert.ok(
FINALIZE_PRE_TIMEOUT_MS >= 30_000,
`pre timeout should be >= 30s (got ${FINALIZE_PRE_TIMEOUT_MS}ms)`,
);
assert.ok(
FINALIZE_PRE_TIMEOUT_MS <= 120_000,
`pre timeout should be <= 120s (got ${FINALIZE_PRE_TIMEOUT_MS}ms)`,
);
});
assertEq(result.timedOut, true, "should report timeout");
assertEq(result.value, undefined, "value should be undefined on timeout");
assertTrue(elapsed >= 90, `should wait at least 90ms (took ${elapsed}ms)`);
assertTrue(elapsed < 500, `should not wait too long (took ${elapsed}ms)`);
}
it("FINALIZE_POST_TIMEOUT_MS is defined and reasonable", () => {
assert.ok(
typeof FINALIZE_POST_TIMEOUT_MS === "number",
"FINALIZE_POST_TIMEOUT_MS should be a number",
);
assert.ok(
FINALIZE_POST_TIMEOUT_MS >= 30_000,
`timeout should be >= 30s (got ${FINALIZE_POST_TIMEOUT_MS}ms)`,
);
assert.ok(
FINALIZE_POST_TIMEOUT_MS <= 120_000,
`timeout should be <= 120s (got ${FINALIZE_POST_TIMEOUT_MS}ms)`,
);
});
// ═══ Test: withTimeout handles rejection gracefully ═══════════════════════════
it("MAX_FINALIZE_TIMEOUTS is defined and reasonable", () => {
assert.ok(
typeof MAX_FINALIZE_TIMEOUTS === "number",
"MAX_FINALIZE_TIMEOUTS should be a number",
);
assert.ok(
MAX_FINALIZE_TIMEOUTS >= 2,
`threshold should be >= 2 (got ${MAX_FINALIZE_TIMEOUTS})`,
);
assert.ok(
MAX_FINALIZE_TIMEOUTS <= 10,
`threshold should be <= 10 (got ${MAX_FINALIZE_TIMEOUTS})`,
);
});
});
{
console.log("\n=== #2344: withTimeout propagates rejection ===");
let caught = false;
try {
await withTimeout(Promise.reject(new Error("boom")), 1000, "test-timeout");
} catch (err: any) {
caught = true;
assertEq(err.message, "boom", "should propagate the error");
}
assertTrue(caught, "rejection should propagate");
}
// ═══ Test: FINALIZE_PRE_TIMEOUT_MS is defined and reasonable ═════════════════
console.log(
"\n=== #3757: pre-verification timeout constant is defined and reasonable ===",
);
assertTrue(
typeof FINALIZE_PRE_TIMEOUT_MS === "number",
"FINALIZE_PRE_TIMEOUT_MS should be a number",
);
assertTrue(
FINALIZE_PRE_TIMEOUT_MS >= 30_000,
`pre timeout should be >= 30s (got ${FINALIZE_PRE_TIMEOUT_MS}ms)`,
);
assertTrue(
FINALIZE_PRE_TIMEOUT_MS <= 120_000,
`pre timeout should be <= 120s (got ${FINALIZE_PRE_TIMEOUT_MS}ms)`,
);
// ═══ Test: FINALIZE_POST_TIMEOUT_MS is defined and reasonable ═════════════════
console.log("\n=== #2344: timeout constant is defined and reasonable ===");
assertTrue(
typeof FINALIZE_POST_TIMEOUT_MS === "number",
"FINALIZE_POST_TIMEOUT_MS should be a number",
);
assertTrue(
FINALIZE_POST_TIMEOUT_MS >= 30_000,
`timeout should be >= 30s (got ${FINALIZE_POST_TIMEOUT_MS}ms)`,
);
assertTrue(
FINALIZE_POST_TIMEOUT_MS <= 120_000,
`timeout should be <= 120s (got ${FINALIZE_POST_TIMEOUT_MS}ms)`,
);
// ═══ Test: withTimeout cleans up timer on success ════════════════════════════
{
console.log("\n=== #2344: withTimeout cleans up timer on success ===");
// If the timer isn't cleaned up, this test would keep the process alive.
// Relying on process.exit behavior — if test completes, timers were cleaned.
const result = await withTimeout(
new Promise<string>((r) => setTimeout(() => r("delayed"), 50)),
5000,
"cleanup-test",
);
assertEq(result.value, "delayed", "should resolve with delayed value");
assertEq(result.timedOut, false, "should not time out");
}
// ═══ Test: runFinalize wraps BOTH pre and post verification with withTimeout ═
{
console.log(
"\n=== #3757: runFinalize wraps preVerification with timeout guard ===",
);
const { readFileSync } = await import("node:fs");
describe("runFinalize timeout guards in phases.ts", () => {
const phasesSource = readFileSync(
new URL("../auto/phases.ts", import.meta.url),
"utf-8",
);
const fnBody = getRunFinalizeBody(phasesSource);
// postUnitPreVerification must be wrapped in withTimeout
const preTimeoutIdx = fnBody.indexOf("withTimeout(");
assertTrue(preTimeoutIdx > 0, "withTimeout should appear in runFinalize");
it("wraps postUnitPreVerification with withTimeout", () => {
const preTimeoutIdx = fnBody.indexOf("withTimeout(");
assert.ok(preTimeoutIdx > 0, "withTimeout should appear in runFinalize");
const preVerIdx = fnBody.indexOf("postUnitPreVerification");
assertTrue(
preVerIdx > 0,
"postUnitPreVerification should appear in runFinalize",
);
const preVerIdx = fnBody.indexOf("postUnitPreVerification");
assert.ok(
preVerIdx > 0,
"postUnitPreVerification should appear in runFinalize",
);
// The first withTimeout should wrap postUnitPreVerification (not postUnitPostVerification)
const firstWithTimeout = fnBody.slice(preTimeoutIdx, preTimeoutIdx + 200);
assertTrue(
firstWithTimeout.includes("postUnitPreVerification"),
"first withTimeout in runFinalize should wrap postUnitPreVerification",
);
const firstWithTimeout = fnBody.slice(preTimeoutIdx, preTimeoutIdx + 200);
assert.ok(
firstWithTimeout.includes("postUnitPreVerification"),
"first withTimeout in runFinalize should wrap postUnitPreVerification",
);
});
// postUnitPostVerification must also be wrapped
const postVerIdx = fnBody.indexOf("postUnitPostVerification");
assertTrue(
postVerIdx > 0,
"postUnitPostVerification should appear in runFinalize",
);
it("wraps postUnitPostVerification with withTimeout", () => {
const postVerIdx = fnBody.indexOf("postUnitPostVerification");
assert.ok(
postVerIdx > 0,
"postUnitPostVerification should appear in runFinalize",
);
// Count withTimeout occurrences — should be at least 2 (pre + post)
const timeoutCount = (fnBody.match(/withTimeout\(/g) || []).length;
assertTrue(
timeoutCount >= 2,
`runFinalize should have at least 2 withTimeout guards (found ${timeoutCount})`,
);
}
const timeoutCount = (fnBody.match(/withTimeout\(/g) || []).length;
assert.ok(
timeoutCount >= 2,
`runFinalize should have at least 2 withTimeout guards (found ${timeoutCount})`,
);
});
// ═══ Test: MAX_FINALIZE_TIMEOUTS is defined and reasonable ═══════════════════
it("increments consecutiveFinalizeTimeouts in both pre and post handlers", () => {
const incrementCount = (
fnBody.match(/consecutiveFinalizeTimeouts\+\+/g) || []
).length;
assert.ok(
incrementCount >= 2,
`should increment consecutiveFinalizeTimeouts in both pre and post handlers (found ${incrementCount})`,
);
});
console.log("\n=== #3757: MAX_FINALIZE_TIMEOUTS is defined and reasonable ===");
it("checks MAX_FINALIZE_TIMEOUTS in both timeout handlers", () => {
const escalationCount = (fnBody.match(/MAX_FINALIZE_TIMEOUTS/g) || [])
.length;
assert.ok(
escalationCount >= 2,
`should check MAX_FINALIZE_TIMEOUTS in both handlers (found ${escalationCount})`,
);
});
assertTrue(
typeof MAX_FINALIZE_TIMEOUTS === "number",
"MAX_FINALIZE_TIMEOUTS should be a number",
);
assertTrue(
MAX_FINALIZE_TIMEOUTS >= 2,
`threshold should be >= 2 (got ${MAX_FINALIZE_TIMEOUTS})`,
);
assertTrue(
MAX_FINALIZE_TIMEOUTS <= 10,
`threshold should be <= 10 (got ${MAX_FINALIZE_TIMEOUTS})`,
);
it("detaches s.currentUnit in both timeout handlers", () => {
const detachCount = (fnBody.match(/s\.currentUnit\s*=\s*null/g) || [])
.length;
assert.ok(
detachCount >= 2,
`should detach s.currentUnit in both timeout handlers (found ${detachCount})`,
);
});
// ═══ Test: timeout handlers escalate after consecutive timeouts ══════════════
{
console.log(
"\n=== #3757: timeout handlers escalate and detach currentUnit ===",
);
const { readFileSync } = await import("node:fs");
const phasesSource = readFileSync(
new URL("../auto/phases.ts", import.meta.url),
"utf-8",
);
const fnBody = getRunFinalizeBody(phasesSource);
// Both timeout handlers should increment consecutiveFinalizeTimeouts
const incrementCount = (
fnBody.match(/consecutiveFinalizeTimeouts\+\+/g) || []
).length;
assertTrue(
incrementCount >= 2,
`should increment consecutiveFinalizeTimeouts in both pre and post handlers (found ${incrementCount})`,
);
// Both timeout handlers should check MAX_FINALIZE_TIMEOUTS for escalation
const escalationCount = (fnBody.match(/MAX_FINALIZE_TIMEOUTS/g) || []).length;
assertTrue(
escalationCount >= 2,
`should check MAX_FINALIZE_TIMEOUTS in both handlers (found ${escalationCount})`,
);
// Both timeout handlers should null out s.currentUnit to prevent late mutations
const detachCount = (fnBody.match(/s\.currentUnit\s*=\s*null/g) || []).length;
assertTrue(
detachCount >= 2,
`should detach s.currentUnit in both timeout handlers (found ${detachCount})`,
);
// Successful finalize should reset the counter
assertTrue(
fnBody.includes("consecutiveFinalizeTimeouts = 0"),
"should reset consecutiveFinalizeTimeouts on successful finalize",
);
}
report();
it("resets consecutiveFinalizeTimeouts on successful finalize", () => {
assert.ok(
fnBody.includes("consecutiveFinalizeTimeouts = 0"),
"should reset consecutiveFinalizeTimeouts on successful finalize",
);
});
});

View file

@ -1,15 +1,14 @@
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { describe, it } from "vitest";
import { extractSection } from "../files.ts";
import { createTestContext } from "./test-helpers.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
const templatesDir = join(__dirname, "..", "templates");
const promptsDir = join(__dirname, "..", "prompts");
const { assertTrue, report } = createTestContext();
function loadTemplate(name: string): string {
return readFileSync(join(templatesDir, `${name}.md`), "utf-8");
}
@ -18,267 +17,243 @@ function loadPrompt(name: string): string {
return readFileSync(join(promptsDir, `${name}.md`), "utf-8");
}
// ═══════════════════════════════════════════════════════════════════════════
// Level 1: Templates contain quality gate headings
// ═══════════════════════════════════════════════════════════════════════════
describe("Level 1: Templates contain quality gate headings", () => {
it("plan.md contains ## Threat Surface", () => {
const plan = loadTemplate("plan");
assert.ok(plan.includes("## Threat Surface"));
});
console.log("\n=== Level 1: Templates contain quality gate headings ===");
{
const plan = loadTemplate("plan");
assertTrue(
plan.includes("## Threat Surface"),
"plan.md contains ## Threat Surface",
);
assertTrue(
plan.includes("## Requirement Impact"),
"plan.md contains ## Requirement Impact",
);
it("plan.md contains ## Requirement Impact", () => {
const plan = loadTemplate("plan");
assert.ok(plan.includes("## Requirement Impact"));
});
const taskPlan = loadTemplate("task-plan");
assertTrue(
taskPlan.includes("## Failure Modes"),
"task-plan.md contains ## Failure Modes",
);
assertTrue(
taskPlan.includes("## Load Profile"),
"task-plan.md contains ## Load Profile",
);
assertTrue(
taskPlan.includes("## Negative Tests"),
"task-plan.md contains ## Negative Tests",
);
it("task-plan.md contains ## Failure Modes", () => {
const taskPlan = loadTemplate("task-plan");
assert.ok(taskPlan.includes("## Failure Modes"));
});
const sliceSummary = loadTemplate("slice-summary");
assertTrue(
sliceSummary.includes("## Operational Readiness"),
"slice-summary.md contains ## Operational Readiness",
);
it("task-plan.md contains ## Load Profile", () => {
const taskPlan = loadTemplate("task-plan");
assert.ok(taskPlan.includes("## Load Profile"));
});
const roadmap = loadTemplate("roadmap");
assertTrue(
roadmap.includes("## Horizontal Checklist"),
"roadmap.md contains ## Horizontal Checklist",
);
it("task-plan.md contains ## Negative Tests", () => {
const taskPlan = loadTemplate("task-plan");
assert.ok(taskPlan.includes("## Negative Tests"));
});
const milestoneSummary = loadTemplate("milestone-summary");
assertTrue(
milestoneSummary.includes("## Decision Re-evaluation"),
"milestone-summary.md contains ## Decision Re-evaluation",
);
}
it("slice-summary.md contains ## Operational Readiness", () => {
const sliceSummary = loadTemplate("slice-summary");
assert.ok(sliceSummary.includes("## Operational Readiness"));
});
// ═══════════════════════════════════════════════════════════════════════════
// Level 2: Prompts reference quality gates
// ═══════════════════════════════════════════════════════════════════════════
it("roadmap.md contains ## Horizontal Checklist", () => {
const roadmap = loadTemplate("roadmap");
assert.ok(roadmap.includes("## Horizontal Checklist"));
});
console.log("\n=== Level 2: Prompts reference quality gates ===");
{
const planSlice = loadPrompt("plan-slice");
assertTrue(
planSlice.includes("Threat Surface"),
"plan-slice.md mentions Threat Surface",
);
assertTrue(
planSlice.includes("Requirement Impact"),
"plan-slice.md mentions Requirement Impact",
);
assertTrue(
planSlice.toLowerCase().includes("quality gate"),
"plan-slice.md mentions quality gate",
);
it("milestone-summary.md contains ## Decision Re-evaluation", () => {
const milestoneSummary = loadTemplate("milestone-summary");
assert.ok(milestoneSummary.includes("## Decision Re-evaluation"));
});
});
const guidedPlanSlice = loadPrompt("guided-plan-slice");
assertTrue(
guidedPlanSlice.includes("Threat Surface") ||
guidedPlanSlice.includes("Q3"),
"guided-plan-slice.md mentions Threat Surface or Q3",
);
describe("Level 2: Prompts reference quality gates", () => {
it("plan-slice.md mentions Threat Surface", () => {
const planSlice = loadPrompt("plan-slice");
assert.ok(planSlice.includes("Threat Surface"));
});
const executeTask = loadPrompt("execute-task");
assertTrue(
executeTask.includes("Failure Modes"),
"execute-task.md mentions Failure Modes",
);
assertTrue(
executeTask.includes("Load Profile"),
"execute-task.md mentions Load Profile",
);
assertTrue(
executeTask.includes("Negative Tests"),
"execute-task.md mentions Negative Tests",
);
it("plan-slice.md mentions Requirement Impact", () => {
const planSlice = loadPrompt("plan-slice");
assert.ok(planSlice.includes("Requirement Impact"));
});
const guidedExecuteTask = loadPrompt("guided-execute-task");
assertTrue(
guidedExecuteTask.includes("Failure Modes") ||
guidedExecuteTask.includes("Q5"),
"guided-execute-task.md mentions Failure Modes or Q5",
);
it("plan-slice.md mentions quality gate", () => {
const planSlice = loadPrompt("plan-slice");
assert.ok(planSlice.toLowerCase().includes("quality gate"));
});
const completeSlice = loadPrompt("complete-slice");
assertTrue(
completeSlice.includes("Operational Readiness"),
"complete-slice.md mentions Operational Readiness",
);
it("guided-plan-slice.md mentions Threat Surface or Q3", () => {
const guidedPlanSlice = loadPrompt("guided-plan-slice");
assert.ok(
guidedPlanSlice.includes("Threat Surface") ||
guidedPlanSlice.includes("Q3"),
);
});
const guidedCompleteSlice = loadPrompt("guided-complete-slice");
assertTrue(
guidedCompleteSlice.includes("Operational Readiness") ||
guidedCompleteSlice.includes("Q8"),
"guided-complete-slice.md mentions Operational Readiness or Q8",
);
it("execute-task.md mentions Failure Modes", () => {
const executeTask = loadPrompt("execute-task");
assert.ok(executeTask.includes("Failure Modes"));
});
const completeMilestone = loadPrompt("complete-milestone");
assertTrue(
completeMilestone.includes("Horizontal Checklist"),
"complete-milestone.md mentions Horizontal Checklist",
);
assertTrue(
completeMilestone.includes("Decision Re-evaluation"),
"complete-milestone.md mentions Decision Re-evaluation",
);
it("execute-task.md mentions Load Profile", () => {
const executeTask = loadPrompt("execute-task");
assert.ok(executeTask.includes("Load Profile"));
});
const planMilestone = loadPrompt("plan-milestone");
assertTrue(
planMilestone.toLowerCase().includes("horizontal checklist"),
"plan-milestone.md mentions horizontal checklist",
);
it("execute-task.md mentions Negative Tests", () => {
const executeTask = loadPrompt("execute-task");
assert.ok(executeTask.includes("Negative Tests"));
});
const guidedPlanMilestone = loadPrompt("guided-plan-milestone");
assertTrue(
guidedPlanMilestone.includes("Horizontal Checklist"),
"guided-plan-milestone.md mentions Horizontal Checklist",
);
it("guided-execute-task.md mentions Failure Modes or Q5", () => {
const guidedExecuteTask = loadPrompt("guided-execute-task");
assert.ok(
guidedExecuteTask.includes("Failure Modes") ||
guidedExecuteTask.includes("Q5"),
);
});
const reassess = loadPrompt("reassess-roadmap");
assertTrue(
reassess.includes("Threat Surface"),
"reassess-roadmap.md mentions Threat Surface",
);
assertTrue(
reassess.includes("Operational Readiness"),
"reassess-roadmap.md mentions Operational Readiness",
);
assertTrue(
reassess.includes("Horizontal Checklist"),
"reassess-roadmap.md mentions Horizontal Checklist",
);
it("complete-slice.md mentions Operational Readiness", () => {
const completeSlice = loadPrompt("complete-slice");
assert.ok(completeSlice.includes("Operational Readiness"));
});
const replan = loadPrompt("replan-slice");
assertTrue(
replan.includes("Threat Surface"),
"replan-slice.md mentions Threat Surface",
);
}
it("guided-complete-slice.md mentions Operational Readiness or Q8", () => {
const guidedCompleteSlice = loadPrompt("guided-complete-slice");
assert.ok(
guidedCompleteSlice.includes("Operational Readiness") ||
guidedCompleteSlice.includes("Q8"),
);
});
// ═══════════════════════════════════════════════════════════════════════════
// Level 3: Parser backward compatibility — extractSection handles new headings
// ═══════════════════════════════════════════════════════════════════════════
it("complete-milestone.md mentions Horizontal Checklist", () => {
const completeMilestone = loadPrompt("complete-milestone");
assert.ok(completeMilestone.includes("Horizontal Checklist"));
});
console.log("\n=== Level 3: extractSection backward compatibility ===");
{
// Old-style slice plan (no quality gate sections)
const oldPlan = `# S01: Auth Flow
it("complete-milestone.md mentions Decision Re-evaluation", () => {
const completeMilestone = loadPrompt("complete-milestone");
assert.ok(completeMilestone.includes("Decision Re-evaluation"));
});
**Goal:** Build login
**Demo:** User can log in
it("plan-milestone.md mentions horizontal checklist", () => {
const planMilestone = loadPrompt("plan-milestone");
assert.ok(planMilestone.toLowerCase().includes("horizontal checklist"));
});
it("guided-plan-milestone.md mentions Horizontal Checklist", () => {
const guidedPlanMilestone = loadPrompt("guided-plan-milestone");
assert.ok(guidedPlanMilestone.includes("Horizontal Checklist"));
});
it("reassess-roadmap.md mentions Threat Surface", () => {
const reassess = loadPrompt("reassess-roadmap");
assert.ok(reassess.includes("Threat Surface"));
});
it("reassess-roadmap.md mentions Operational Readiness", () => {
const reassess = loadPrompt("reassess-roadmap");
assert.ok(reassess.includes("Operational Readiness"));
});
it("reassess-roadmap.md mentions Horizontal Checklist", () => {
const reassess = loadPrompt("reassess-roadmap");
assert.ok(reassess.includes("Horizontal Checklist"));
});
it("replan-slice.md mentions Threat Surface", () => {
const replan = loadPrompt("replan-slice");
assert.ok(replan.includes("Threat Surface"));
});
});
describe("Level 3: Parser backward compatibility — extractSection handles new headings", () => {
it("extractSection returns null for Threat Surface on old plan", () => {
const oldPlan = `# S01: Auth Flow
## Must-Haves
- Login form works
- Session persists
## Proof Level
- This slice proves: integration
## Tasks
- [ ] **T01: Build login** \`est:1h\`
`;
assert.strictEqual(extractSection(oldPlan, "Threat Surface"), null);
});
// New-style slice plan (with quality gate sections)
const newPlan = `# S01: Auth Flow
**Goal:** Build login
**Demo:** User can log in
it("extractSection returns null for Requirement Impact on old plan", () => {
const oldPlan = `# S01: Auth Flow
## Must-Haves
- Login form works
## Proof Level
## Tasks
- [ ] **T01: Build login** \`est:1h\`
`;
assert.strictEqual(extractSection(oldPlan, "Requirement Impact"), null);
});
it("extractSection still parses Must-Haves on old plan", () => {
const oldPlan = `# S01: Auth Flow
## Must-Haves
- Login form works
## Proof Level
## Tasks
- [ ] **T01: Build login** \`est:1h\`
`;
const mustHaves = extractSection(oldPlan, "Must-Haves");
assert.ok(mustHaves !== null && mustHaves.includes("Login form works"));
});
it("extractSection extracts Threat Surface content from new plan", () => {
const newPlan = `# S01: Auth Flow
## Must-Haves
- Login form works
- Session persists
## Threat Surface
- **Abuse**: Credential stuffing, brute force login attempts
- **Data exposure**: Session tokens in cookies, password in request body
- **Input trust**: Username/password from form input reaching DB query
## Requirement Impact
- **Requirements touched**: R001, R003
- **Re-verify**: Login flow, session management
- **Decisions revisited**: D002
- **Abuse**: Credential stuffing
## Proof Level
- This slice proves: integration
## Tasks
- [ ] **T01: Build login** \`est:1h\`
`;
const threatSurface = extractSection(newPlan, "Threat Surface");
assert.ok(threatSurface !== null && threatSurface.includes("Credential stuffing"));
});
// Old plan: quality gate sections return null (not found)
assertTrue(
extractSection(oldPlan, "Threat Surface") === null,
"extractSection returns null for Threat Surface on old plan",
);
assertTrue(
extractSection(oldPlan, "Requirement Impact") === null,
"extractSection returns null for Requirement Impact on old plan",
);
it("extractSection extracts Requirement Impact content from new plan", () => {
const newPlan = `# S01: Auth Flow
// Old plan: core sections still parse correctly
const oldMustHaves = extractSection(oldPlan, "Must-Haves");
assertTrue(
oldMustHaves !== null && oldMustHaves.includes("Login form works"),
"extractSection still parses Must-Haves on old plan",
);
## Must-Haves
// New plan: quality gate sections are extracted
const threatSurface = extractSection(newPlan, "Threat Surface");
assertTrue(
threatSurface !== null && threatSurface.includes("Credential stuffing"),
"extractSection extracts Threat Surface content from new plan",
);
- Login form works
const reqImpact = extractSection(newPlan, "Requirement Impact");
assertTrue(
reqImpact !== null && reqImpact.includes("R001"),
"extractSection extracts Requirement Impact content from new plan",
);
## Requirement Impact
// New plan: core sections still parse correctly
const newMustHaves = extractSection(newPlan, "Must-Haves");
assertTrue(
newMustHaves !== null && newMustHaves.includes("Login form works"),
"extractSection still parses Must-Haves on new plan",
);
- **Requirements touched**: R001
// Task plan: Failure Modes
const oldTaskPlan = `# T01: Build Login
## Proof Level
## Description
## Tasks
Build the login endpoint.
## Steps
1. Create route
- [ ] **T01: Build login** \`est:1h\`
`;
const reqImpact = extractSection(newPlan, "Requirement Impact");
assert.ok(reqImpact !== null && reqImpact.includes("R001"));
});
const newTaskPlan = `# T01: Build Login
it("extractSection extracts Failure Modes from new task plan", () => {
const newTaskPlan = `# T01: Build Login
## Description
@ -286,41 +261,18 @@ Build the login endpoint.
## Failure Modes
| Dependency | On error | On timeout | On malformed response |
|------------|----------|-----------|----------------------|
| Auth DB | Return 500 | 3s timeout, retry once | Reject, log warning |
| Dependency | On error |
## Steps
1. Create route
`;
const failureModes = extractSection(newTaskPlan, "Failure Modes");
assert.ok(failureModes !== null && failureModes.includes("Dependency"));
});
assertTrue(
extractSection(oldTaskPlan, "Failure Modes") === null,
"extractSection returns null for Failure Modes on old task plan",
);
const failureModes = extractSection(newTaskPlan, "Failure Modes");
assertTrue(
failureModes !== null && failureModes.includes("Auth DB"),
"extractSection extracts Failure Modes content from new task plan",
);
// Slice summary: Operational Readiness
const oldSummary = `# S01: Auth Flow
**Built login with session management**
## Verification
All tests pass.
## Deviations
None.
`;
const newSummary = `# S01: Auth Flow
it("extractSection extracts Operational Readiness from new summary", () => {
const newSummary = `# S01: Auth Flow
**Built login with session management**
@ -330,90 +282,64 @@ All tests pass.
## Operational Readiness
- **Health signal**: /health endpoint returns 200 with session count
- **Failure signal**: Auth error rate > 5% triggers alert
- **Recovery**: Stateless restart clears nothing
- **Monitoring gaps**: None
- **Health signal**: /health endpoint
## Deviations
None.
`;
const opReadiness = extractSection(newSummary, "Operational Readiness");
assert.ok(opReadiness !== null && opReadiness.includes("/health endpoint"));
});
});
assertTrue(
extractSection(oldSummary, "Operational Readiness") === null,
"extractSection returns null for Operational Readiness on old summary",
);
describe("Level 4: Template section ordering is correct", () => {
it("plan.md: Threat Surface is between Must-Haves and Proof Level", () => {
const plan = loadTemplate("plan");
const mustHavesIdx = plan.indexOf("## Must-Haves");
const threatIdx = plan.indexOf("## Threat Surface");
const proofIdx = plan.indexOf("## Proof Level");
assert.ok(mustHavesIdx < threatIdx && threatIdx < proofIdx);
});
const opReadiness = extractSection(newSummary, "Operational Readiness");
assertTrue(
opReadiness !== null && opReadiness.includes("/health endpoint"),
"extractSection extracts Operational Readiness content from new summary",
);
}
it("plan.md: Requirement Impact is between Threat Surface and Proof Level", () => {
const plan = loadTemplate("plan");
const threatIdx = plan.indexOf("## Threat Surface");
const reqImpactIdx = plan.indexOf("## Requirement Impact");
const proofIdx = plan.indexOf("## Proof Level");
assert.ok(threatIdx < reqImpactIdx && reqImpactIdx < proofIdx);
});
// ═══════════════════════════════════════════════════════════════════════════
// Level 4: Template section ordering is correct
// ═══════════════════════════════════════════════════════════════════════════
it("task-plan.md: Failure Modes is between Description and Steps", () => {
const taskPlan = loadTemplate("task-plan");
const descIdx = taskPlan.indexOf("## Description");
const failIdx = taskPlan.indexOf("## Failure Modes");
const stepsIdx = taskPlan.indexOf("## Steps");
assert.ok(descIdx < failIdx && failIdx < stepsIdx);
});
console.log("\n=== Level 4: Template section ordering ===");
{
const plan = loadTemplate("plan");
const mustHavesIdx = plan.indexOf("## Must-Haves");
const threatIdx = plan.indexOf("## Threat Surface");
const proofIdx = plan.indexOf("## Proof Level");
assertTrue(
mustHavesIdx < threatIdx && threatIdx < proofIdx,
"plan.md: Threat Surface is between Must-Haves and Proof Level",
);
it("task-plan.md: Failure Modes < Load Profile < Negative Tests < Steps", () => {
const taskPlan = loadTemplate("task-plan");
const failIdx = taskPlan.indexOf("## Failure Modes");
const loadIdx = taskPlan.indexOf("## Load Profile");
const negIdx = taskPlan.indexOf("## Negative Tests");
const stepsIdx = taskPlan.indexOf("## Steps");
assert.ok(failIdx < loadIdx && loadIdx < negIdx && negIdx < stepsIdx);
});
const reqImpactIdx = plan.indexOf("## Requirement Impact");
assertTrue(
threatIdx < reqImpactIdx && reqImpactIdx < proofIdx,
"plan.md: Requirement Impact is between Threat Surface and Proof Level",
);
it("slice-summary.md: Operational Readiness is between Requirements Invalidated and Deviations", () => {
const sliceSummary = loadTemplate("slice-summary");
const reqInvalidIdx = sliceSummary.indexOf("## Requirements Invalidated");
const opIdx = sliceSummary.indexOf("## Operational Readiness");
const devIdx = sliceSummary.indexOf("## Deviations");
assert.ok(reqInvalidIdx < opIdx && opIdx < devIdx);
});
const taskPlan = loadTemplate("task-plan");
const descIdx = taskPlan.indexOf("## Description");
const failIdx = taskPlan.indexOf("## Failure Modes");
const stepsIdx = taskPlan.indexOf("## Steps");
assertTrue(
descIdx < failIdx && failIdx < stepsIdx,
"task-plan.md: Failure Modes is between Description and Steps",
);
const loadIdx = taskPlan.indexOf("## Load Profile");
const negIdx = taskPlan.indexOf("## Negative Tests");
assertTrue(
failIdx < loadIdx && loadIdx < negIdx && negIdx < stepsIdx,
"task-plan.md: Failure Modes < Load Profile < Negative Tests < Steps",
);
const sliceSummary = loadTemplate("slice-summary");
const reqInvalidIdx = sliceSummary.indexOf("## Requirements Invalidated");
const opIdx = sliceSummary.indexOf("## Operational Readiness");
const devIdx = sliceSummary.indexOf("## Deviations");
assertTrue(
reqInvalidIdx < opIdx && opIdx < devIdx,
"slice-summary.md: Operational Readiness is between Requirements Invalidated and Deviations",
);
const roadmap = loadTemplate("roadmap");
const horizIdx = roadmap.indexOf("## Horizontal Checklist");
const boundaryIdx = roadmap.indexOf("## Boundary Map");
assertTrue(
horizIdx > 0 && horizIdx < boundaryIdx,
"roadmap.md: Horizontal Checklist is before Boundary Map",
);
const milestoneSummary = loadTemplate("milestone-summary");
const reqChangesIdx = milestoneSummary.indexOf("## Requirement Changes");
const decRevalIdx = milestoneSummary.indexOf("## Decision Re-evaluation");
const fwdIntelIdx = milestoneSummary.indexOf("## Forward Intelligence");
assertTrue(
reqChangesIdx < decRevalIdx && decRevalIdx < fwdIntelIdx,
"milestone-summary.md: Decision Re-evaluation is between Requirement Changes and Forward Intelligence",
);
}
report();
it("milestone-summary.md: Decision Re-evaluation is between Requirement Changes and Forward Intelligence", () => {
const milestoneSummary = loadTemplate("milestone-summary");
const reqChangesIdx = milestoneSummary.indexOf("## Requirement Changes");
const decRevalIdx = milestoneSummary.indexOf("## Decision Re-evaluation");
const fwdIntelIdx = milestoneSummary.indexOf("## Forward Intelligence");
assert.ok(reqChangesIdx < decRevalIdx && decRevalIdx < fwdIntelIdx);
});
});

View file

@ -13,6 +13,7 @@
* - onCompromised does not declare compromise when lock file is transiently unreadable
*/
import assert from "node:assert/strict";
import { spawn } from "node:child_process";
import {
chmodSync,
@ -24,6 +25,7 @@ import {
} from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, it } from "vitest";
import { sfRoot } from "../paths.ts";
import {
acquireSessionLock,
@ -32,14 +34,9 @@ import {
releaseSessionLock,
type SessionLockData,
} from "../session-lock.ts";
import { createTestContext } from "./test-helpers.ts";
const { assertEq, assertTrue, report } = createTestContext();
async function main(): Promise<void> {
// ─── 1. readExistingLockDataWithRetry succeeds on first read when file is fine ─
console.log("\n=== 1. readExistingLockDataWithRetry reads file normally ===");
{
describe("#2324: transient lock file unreadability", () => {
it("reads file normally when readable", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
@ -56,44 +53,31 @@ async function main(): Promise<void> {
writeFileSync(lockFile, JSON.stringify(lockData, null, 2));
const result = readExistingLockDataWithRetry(lockFile);
assertTrue(result !== null, "data returned for readable file");
assertEq(result!.pid, process.pid, "correct PID read");
assertEq(
result!.sessionFile,
"test-session.json",
"correct sessionFile read",
);
assert.ok(result !== null, "data returned for readable file");
assert.strictEqual(result!.pid, process.pid);
assert.strictEqual(result!.sessionFile, "test-session.json");
} finally {
rmSync(base, { recursive: true, force: true });
}
}
});
// ─── 2. readExistingLockDataWithRetry returns null for truly missing file ──
console.log(
"\n=== 2. readExistingLockDataWithRetry returns null for missing file ===",
);
{
it("returns null for truly missing file after retries", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
try {
const lockFile = join(sfRoot(base), "auto.lock");
// File doesn't exist
const result = readExistingLockDataWithRetry(lockFile, {
maxAttempts: 2,
delayMs: 10,
});
assertEq(result, null, "null for truly missing file after retries");
assert.strictEqual(result, null);
} finally {
rmSync(base, { recursive: true, force: true });
}
}
});
// ─── 3. readExistingLockDataWithRetry recovers after transient rename ──────
console.log(
"\n=== 3. readExistingLockDataWithRetry recovers after transient unavailability ===",
);
{
it("recovers after transient unavailability", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
@ -110,10 +94,6 @@ async function main(): Promise<void> {
};
writeFileSync(lockFile, JSON.stringify(lockData, null, 2));
// Simulate transient unavailability: move file away, spawn a child process
// to restore it shortly after. The child runs outside our event loop so it
// fires even during busy-wait retries. Give the test extra retry budget so
// it stays stable under full-suite CPU contention.
renameSync(lockFile, tmpFile);
spawn("bash", ["-c", `sleep 0.05 && mv "${tmpFile}" "${lockFile}"`], {
stdio: "ignore",
@ -124,28 +104,14 @@ async function main(): Promise<void> {
maxAttempts: 8,
delayMs: 400,
});
assertTrue(
result !== null,
"data recovered after transient unavailability",
);
if (result) {
assertEq(result.pid, process.pid, "correct PID after recovery");
assertEq(
result.sessionFile,
"recovery-session.json",
"correct sessionFile after recovery",
);
}
assert.ok(result !== null, "data recovered after transient unavailability");
assert.strictEqual(result!.sessionFile, "recovery-session.json");
} finally {
rmSync(base, { recursive: true, force: true });
}
}
});
// ─── 4. readExistingLockDataWithRetry recovers from transient permission error ─
console.log(
"\n=== 4. readExistingLockDataWithRetry recovers from transient permission error ===",
);
{
it("recovers from transient permission error", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
@ -161,9 +127,6 @@ async function main(): Promise<void> {
};
writeFileSync(lockFile, JSON.stringify(lockData, null, 2));
// Remove read permission to simulate NFS/CIFS latency, then spawn a child
// to restore permissions shortly after (runs outside our event loop).
// Use the same wider retry window as the rename case for full-suite stability.
chmodSync(lockFile, 0o000);
spawn("bash", ["-c", `sleep 0.05 && chmod 644 "${lockFile}"`], {
stdio: "ignore",
@ -174,19 +137,8 @@ async function main(): Promise<void> {
maxAttempts: 8,
delayMs: 400,
});
assertTrue(
result !== null,
"data recovered after transient permission error",
);
if (result) {
assertEq(
result.pid,
process.pid,
"correct PID after permission recovery",
);
}
assert.ok(result !== null, "data recovered after transient permission error");
// Ensure permissions restored for cleanup
try {
chmodSync(lockFile, 0o644);
} catch {
@ -195,30 +147,23 @@ async function main(): Promise<void> {
} finally {
rmSync(base, { recursive: true, force: true });
}
}
});
// ─── 5. getSessionLockStatus does not false-positive on transient read failure ─
console.log(
"\n=== 5. getSessionLockStatus tolerates transient lock file unavailability ===",
);
{
it("tolerates transient lock file unavailability in getSessionLockStatus", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
try {
const result = acquireSessionLock(base);
assertTrue(result.acquired, "lock acquired");
assert.ok(result.acquired, "lock acquired");
// Validate works initially
const status1 = getSessionLockStatus(base);
assertTrue(status1.valid, "lock valid before transient failure");
assert.ok(status1.valid, "lock valid before transient failure");
// Temporarily hide the lock file
const lockFile = join(sfRoot(base), "auto.lock");
const tmpFile = lockFile + ".hidden";
renameSync(lockFile, tmpFile);
// Schedule restoration
setTimeout(() => {
try {
renameSync(tmpFile, lockFile);
@ -227,17 +172,13 @@ async function main(): Promise<void> {
}
}, 30);
// Small delay to ensure restoration runs, then check — with the OS lock
// still held, getSessionLockStatus should return valid=true even if the
// lock file was briefly missing (it checks _releaseFunction first).
await new Promise((r) => setTimeout(r, 60));
const status2 = getSessionLockStatus(base);
assertTrue(
assert.ok(
status2.valid,
"lock still valid after transient file disappearance (OS lock held)",
);
// Restore if not yet restored
try {
renameSync(tmpFile, lockFile);
} catch {
@ -248,13 +189,9 @@ async function main(): Promise<void> {
} finally {
rmSync(base, { recursive: true, force: true });
}
}
});
// ─── 6. Retry defaults: 3 attempts with 200ms delay ────────────────────────
console.log(
"\n=== 6. Default retry params: function works with defaults ===",
);
{
it("default retry params work for readable file", async () => {
const base = mkdtempSync(join(tmpdir(), "sf-transient-"));
mkdirSync(join(base, ".sf"), { recursive: true });
@ -270,18 +207,10 @@ async function main(): Promise<void> {
};
writeFileSync(lockFile, JSON.stringify(lockData, null, 2));
// Call with no options — uses defaults (3 attempts, 200ms)
const result = readExistingLockDataWithRetry(lockFile);
assertTrue(result !== null, "default params work for readable file");
assert.ok(result !== null, "default params work for readable file");
} finally {
rmSync(base, { recursive: true, force: true });
}
}
report();
}
main().catch((error) => {
console.error(error);
process.exit(1);
});
});

View file

@ -5,61 +5,56 @@
* refuse to start auto-mode. Otherwise sf_task_complete returns
* "db_unavailable", artifact retry re-dispatches the same task, and
* the session loops forever.
*
* This test verifies the gate by reading auto-start.ts source and
* confirming the pattern: after the DB lifecycle block, if the DB
* file exists on disk but isDbAvailable() still returns false after
* the open attempt, bootstrap must abort with an error notification.
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const srcPath = join(import.meta.dirname, "..", "auto-start.ts");
const src = readFileSync(srcPath, "utf-8");
console.log("\n=== #2419: SQLite unavailable gate in auto-start.ts ===");
describe("#2419: SQLite unavailable gate in auto-start.ts", () => {
it("has a DB lifecycle section", () => {
const dbLifecycleIdx = src.indexOf("DB lifecycle");
assert.ok(dbLifecycleIdx > 0);
});
// The DB lifecycle section tries to open the DB. After those try/catch
// blocks, there must be a HARD GATE: if the DB file exists on disk but
// isDbAvailable() is still false (open failed), bootstrap must abort
// by calling releaseLockAndReturn() with an error notification.
it("DB lifecycle section still checks for unavailable DB state", () => {
const afterDbLifecycle = src.slice(src.indexOf("DB lifecycle"));
assert.ok(afterDbLifecycle.includes("!isDbAvailable()"));
});
const dbLifecycleIdx = src.indexOf("DB lifecycle");
assertTrue(dbLifecycleIdx > 0, "auto-start.ts has a DB lifecycle section");
it("has a hard abort gate when sf.db exists but SQLite is still unavailable", () => {
const afterDbLifecycle = src.slice(src.indexOf("DB lifecycle"));
const afterDbLifecycle = src.slice(dbLifecycleIdx);
const gateMatch = afterDbLifecycle.match(
/if\s*\(existsSync\(sfDbPath\)\s*&&\s*!isDbAvailable\(\)\)\s*\{[\s\S]*?releaseLockAndReturn\(\);[\s\S]*?\}/,
);
// The DB lifecycle section may contain multiple isDbAvailable() checks now that
// cold-start bootstrap can pre-open the DB earlier in the file. What matters
// for #2419 is the explicit abort gate after the DB open attempts.
assertTrue(
afterDbLifecycle.includes("!isDbAvailable()"),
"DB lifecycle section still checks for unavailable DB state (#2419)",
);
assert.ok(!!gateMatch);
});
const gateMatch = afterDbLifecycle.match(
/if\s*\(existsSync\(sfDbPath\)\s*&&\s*!isDbAvailable\(\)\)\s*\{[\s\S]*?releaseLockAndReturn\(\);[\s\S]*?\}/,
);
it("DB availability gate calls releaseLockAndReturn() to abort bootstrap", () => {
const afterDbLifecycle = src.slice(src.indexOf("DB lifecycle"));
assertTrue(
!!gateMatch,
"auto-start.ts has a hard abort gate when sf.db exists but SQLite is still unavailable (#2419)",
);
const gateMatch = afterDbLifecycle.match(
/if\s*\(existsSync\(sfDbPath\)\s*&&\s*!isDbAvailable\(\)\)\s*\{[\s\S]*?releaseLockAndReturn\(\);[\s\S]*?\}/,
);
if (gateMatch) {
const gateRegion = gateMatch[0];
assertTrue(
gateRegion.includes("releaseLockAndReturn"),
"The DB availability gate calls releaseLockAndReturn() to abort bootstrap (#2419)",
);
assertTrue(
/database|sqlite|db.*unavailable/i.test(gateRegion),
"The DB availability gate includes a user-facing error message about the database (#2419)",
);
}
assert.ok(gateMatch && gateMatch[0].includes("releaseLockAndReturn"));
});
report();
it("DB availability gate includes user-facing error message about the database", () => {
const afterDbLifecycle = src.slice(src.indexOf("DB lifecycle"));
const gateMatch = afterDbLifecycle.match(
/if\s*\(existsSync\(sfDbPath\)\s*&&\s*!isDbAvailable\(\)\)\s*\{[\s\S]*?releaseLockAndReturn\(\);[\s\S]*?\}/,
);
assert.ok(
gateMatch && /database|sqlite|db.*unavailable/i.test(gateMatch[0]),
);
});
});

View file

@ -11,99 +11,60 @@
* s.currentUnit has been nulled by a concurrent stopAuto().
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const phasesPath = join(import.meta.dirname, "..", "auto", "phases.ts");
const phasesSrc = readFileSync(phasesPath, "utf-8");
console.log(
"\n=== #2939: stopAuto race — null guard on s.currentUnit in closeout ===",
);
describe("#2939: stopAuto race — null guard on s.currentUnit", () => {
it("closeoutUnit call is guarded by if (s.currentUnit)", () => {
const closeoutComment = "Immediate unit closeout";
const closeoutIdx = phasesSrc.indexOf(closeoutComment);
assert.ok(closeoutIdx > 0);
// ── Test 1: closeoutUnit call is guarded by if (s.currentUnit) ──────────
// The closeout block starting around the "Immediate unit closeout" comment
// must be wrapped in an `if (s.currentUnit)` guard, matching the pattern
// already used at lines 136 and 344.
const closeoutRegion = phasesSrc.slice(closeoutIdx, closeoutIdx + 500);
assert.ok(closeoutRegion.includes("if (s.currentUnit)"));
});
const closeoutComment = "Immediate unit closeout";
const closeoutIdx = phasesSrc.indexOf(closeoutComment);
assertTrue(
closeoutIdx > 0,
"phases.ts contains the 'Immediate unit closeout' comment block",
);
it("zero-tool-call guard no longer uses non-null assertion on s.currentUnit", () => {
const zeroToolComment = "Zero tool-call guard";
const zeroToolIdx = phasesSrc.indexOf(zeroToolComment);
assert.ok(zeroToolIdx > 0);
// Extract the region from the closeout comment to the next section comment
const closeoutRegion = phasesSrc.slice(closeoutIdx, closeoutIdx + 500);
assertTrue(
closeoutRegion.includes("if (s.currentUnit)"),
"closeoutUnit call is guarded by `if (s.currentUnit)` check (#2939)",
);
const zeroToolRegion = phasesSrc.slice(zeroToolIdx, zeroToolIdx + 600);
assert.ok(!zeroToolRegion.includes("s.currentUnit!.startedAt"));
});
// ── Test 2: zero-tool-call guard uses s.currentUnit?.startedAt ──────────
// The zero-tool-call section accesses s.currentUnit!.startedAt (non-null
// assertion) which will throw if currentUnit is null.
it("no non-null assertions s.currentUnit!.startedAt after closeout comment", () => {
const closeoutComment = "Immediate unit closeout";
const closeoutIdx = phasesSrc.indexOf(closeoutComment);
const afterCloseout = phasesSrc.slice(closeoutIdx);
const zeroToolComment = "Zero tool-call guard";
const zeroToolIdx = phasesSrc.indexOf(zeroToolComment);
assertTrue(
zeroToolIdx > 0,
"phases.ts contains the 'Zero tool-call guard' comment block",
);
const nonNullPattern = /s\.currentUnit!\.startedAt/g;
const nonNullAfterCloseout = [...afterCloseout.matchAll(nonNullPattern)];
assert.strictEqual(nonNullAfterCloseout.length, 0);
});
const zeroToolRegion = phasesSrc.slice(zeroToolIdx, zeroToolIdx + 600);
it("no return statements use bare s.currentUnit.startedAt after closeout comment", () => {
const closeoutComment = "Immediate unit closeout";
const closeoutIdx = phasesSrc.indexOf(closeoutComment);
const afterCloseout = phasesSrc.slice(closeoutIdx);
// The non-null assertion `s.currentUnit!.startedAt` must be replaced with
// optional chaining `s.currentUnit?.startedAt`
assertTrue(
!zeroToolRegion.includes("s.currentUnit!.startedAt"),
"zero-tool-call guard no longer uses non-null assertion on s.currentUnit (#2939)",
);
const returnWithBareAccess = /return\s*\{[^}]*s\.currentUnit\.startedAt/g;
const bareReturnCount = [...afterCloseout.matchAll(returnWithBareAccess)]
.length;
assert.strictEqual(bareReturnCount, 0);
});
// ── Test 3: return value uses optional chaining for startedAt ───────────
// The final return at the end of runUnitPhase uses s.currentUnit.startedAt
// which will throw if currentUnit was nulled. It must use optional chaining.
it("final return uses s.currentUnit?.startedAt with optional chaining", () => {
const closeoutComment = "Immediate unit closeout";
const closeoutIdx = phasesSrc.indexOf(closeoutComment);
const afterCloseout = phasesSrc.slice(closeoutIdx);
// Find the last return statement in runUnitPhase that references startedAt.
// There are two: one inside the zero-tool-call block and one at the end.
// Both must use s.currentUnit?.startedAt
// Count unguarded s.currentUnit.startedAt (without optional chaining)
// after the "Immediate unit closeout" comment. All of them should use
// optional chaining or be inside a guard.
const afterCloseout = phasesSrc.slice(closeoutIdx);
// Count s.currentUnit!.startedAt (non-null assertion — always unsafe)
const nonNullPattern = /s\.currentUnit!\.startedAt/g;
const nonNullAfterCloseout = [...afterCloseout.matchAll(nonNullPattern)];
assertTrue(
nonNullAfterCloseout.length === 0,
`no non-null assertions s.currentUnit!.startedAt after closeout comment (found ${nonNullAfterCloseout.length}, expected 0) (#2939)`,
);
// Count bare s.currentUnit.startedAt that are NOT inside an if (s.currentUnit) guard.
// The closeout block itself uses s.currentUnit.startedAt inside a guard — that's fine.
// But any usage outside a guard block (e.g. in a return statement) must use optional chaining.
// We check that all return statements use optional chaining.
const returnWithBareAccess = /return\s*\{[^}]*s\.currentUnit\.startedAt/g;
const bareReturnCount = [...afterCloseout.matchAll(returnWithBareAccess)]
.length;
assertTrue(
bareReturnCount === 0,
`no return statements use bare s.currentUnit.startedAt (found ${bareReturnCount}, expected 0) (#2939)`,
);
// ── Test 4: the return at end of runUnitPhase uses optional chaining ────
// The final `return { action: "next", data: { unitStartedAt: s.currentUnit?.startedAt } }`
// must use optional chaining.
const finalReturnPattern = /unitStartedAt:\s*s\.currentUnit\?\.startedAt/;
assertTrue(
finalReturnPattern.test(afterCloseout),
"final return uses s.currentUnit?.startedAt with optional chaining (#2939)",
);
report();
const finalReturnPattern = /unitStartedAt:\s*s\.currentUnit\?\.startedAt/;
assert.ok(finalReturnPattern.test(afterCloseout));
});
});

View file

@ -11,15 +11,10 @@
* silently replaces richer content with a stripped-down version.
*/
import assert from "node:assert/strict";
import type { TaskRow } from "../sf-db.ts";
import { renderSummaryContent } from "../workflow-projections.ts";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
// ═══════════════════════════════════════════════════════════════════════════
// Fixtures — same logical data in both shapes
// ═══════════════════════════════════════════════════════════════════════════
import { describe, it } from "vitest";
const SLICE_ID = "S01";
const MILESTONE_ID = "M001";
@ -67,174 +62,91 @@ const verificationEvidence = [
},
];
// ═══════════════════════════════════════════════════════════════════════════
// Tests
// ═══════════════════════════════════════════════════════════════════════════
describe("#2720: summary render parity", () => {
it("includes Verification section", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes("## Verification"));
});
// Test 1: renderSummaryContent includes Verification section
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("## Verification"),
"renderSummaryContent must include a ## Verification section",
);
}
it("includes Verification Evidence table", () => {
const output = renderSummaryContent(
taskRow,
SLICE_ID,
MILESTONE_ID,
verificationEvidence,
);
assert.ok(output.includes("## Verification Evidence"));
assert.ok(output.includes("npm test"));
});
// Test 2: renderSummaryContent includes Verification Evidence table
{
const output = renderSummaryContent(
taskRow,
SLICE_ID,
MILESTONE_ID,
verificationEvidence,
);
assertTrue(
output.includes("## Verification Evidence"),
"renderSummaryContent must include a ## Verification Evidence section",
);
assertTrue(
output.includes("npm test"),
"Verification Evidence table must include the command",
);
assertTrue(
output.includes("| Exit Code |") ||
output.includes("exit_code") ||
output.includes("Exit Code"),
"Verification Evidence table must include exit code column",
);
}
it("includes Files Created/Modified section", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes("## Files Created/Modified"));
assert.ok(output.includes("`src/parser.ts`"));
});
// Test 3: renderSummaryContent includes Files Created/Modified section
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("## Files Created/Modified"),
"renderSummaryContent must include a ## Files Created/Modified section",
);
assertTrue(
output.includes("`src/parser.ts`"),
"Files section must list key_files as inline code",
);
}
it("one_liner renders as bold (not blockquote)", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes(`**${taskRow.one_liner}**`));
});
// Test 4: one_liner renders as bold (not blockquote) for consistency
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes(`**${taskRow.one_liner}**`),
"one_liner must render as bold text (not blockquote)",
);
}
it("frontmatter key_files uses YAML list format", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(
output.includes("key_files:\n - src/parser.ts\n - src/lexer.ts"),
);
});
// Test 5: frontmatter key_files uses YAML list format (not JSON array)
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("key_files:\n - src/parser.ts\n - src/lexer.ts"),
"key_files frontmatter must use YAML list format, not JSON array",
);
}
it("frontmatter key_decisions uses YAML list format", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(
output.includes(
"key_decisions:\n - Hand-rolled parser over PEG for 3x throughput",
),
);
});
// Test 6: frontmatter key_decisions uses YAML list format (not JSON array)
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes(
"key_decisions:\n - Hand-rolled parser over PEG for 3x throughput",
),
"key_decisions frontmatter must use YAML list format, not JSON array",
);
}
it("Deviations section always present with 'None.' fallback", () => {
const noDeviations = { ...taskRow, deviations: "" };
const output = renderSummaryContent(noDeviations, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes("## Deviations"));
assert.ok(output.includes("None."));
});
// Test 7: Deviations section always present (with "None." fallback)
{
const noDeviations = { ...taskRow, deviations: "" };
const output = renderSummaryContent(noDeviations, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("## Deviations"),
"Deviations section must always be present even when empty",
);
assertTrue(
output.includes("None."),
"Deviations section must show 'None.' when no deviations",
);
}
it("Known Issues section always present", () => {
const noKnownIssues = { ...taskRow, known_issues: "" };
const output = renderSummaryContent(noKnownIssues, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes("## Known Issues"));
});
// Test 8: Known Issues section always present (with "None." fallback)
{
const noKnownIssues = { ...taskRow, known_issues: "" };
const output = renderSummaryContent(noKnownIssues, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("## Known Issues"),
"Known Issues section must always be present even when empty",
);
}
it("verification_result frontmatter not double-quoted", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(!output.includes('verification_result: "'));
});
// Test 9: verification_result frontmatter not double-quoted
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
// Should be: verification_result: passed (not "passed")
assertTrue(
!output.includes('verification_result: "'),
"verification_result frontmatter value must not be double-quoted",
);
}
it("duration frontmatter not double-quoted", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(!output.includes('duration: "'));
});
// Test 10: duration frontmatter not double-quoted
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
!output.includes('duration: "'),
"duration frontmatter value must not be double-quoted",
);
}
it("empty key_files renders YAML placeholder", () => {
const noFiles = { ...taskRow, key_files: [] };
const output = renderSummaryContent(noFiles, SLICE_ID, MILESTONE_ID);
assert.ok(output.includes("key_files:\n - (none)"));
});
// Test 11: empty key_files renders YAML placeholder, not empty array
{
const noFiles = { ...taskRow, key_files: [] };
const output = renderSummaryContent(noFiles, SLICE_ID, MILESTONE_ID);
assertTrue(
output.includes("key_files:\n - (none)"),
"empty key_files must render as YAML list with (none) placeholder",
);
}
it("frontmatter does not contain projection-only fields", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assert.ok(!output.includes("provides:"));
assert.ok(!output.includes("requires:"));
assert.ok(!output.includes("affects:"));
assert.ok(!output.includes("patterns_established:"));
assert.ok(!output.includes("drill_down_paths:"));
assert.ok(!output.includes("observability_surfaces:"));
});
// Test 12: frontmatter does not contain extra projection-only fields
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID);
assertTrue(
!output.includes("provides:"),
"frontmatter must not contain provides field",
);
assertTrue(
!output.includes("requires:"),
"frontmatter must not contain requires field",
);
assertTrue(
!output.includes("affects:"),
"frontmatter must not contain affects field",
);
assertTrue(
!output.includes("patterns_established:"),
"frontmatter must not contain patterns_established field",
);
assertTrue(
!output.includes("drill_down_paths:"),
"frontmatter must not contain drill_down_paths field",
);
assertTrue(
!output.includes("observability_surfaces:"),
"frontmatter must not contain observability_surfaces field",
);
}
// Test 13: no verification evidence renders empty table row
{
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID, []);
assertTrue(
output.includes("No verification commands discovered"),
"Empty evidence array must render placeholder row",
);
}
report();
it("empty verification evidence renders placeholder row", () => {
const output = renderSummaryContent(taskRow, SLICE_ID, MILESTONE_ID, []);
assert.ok(output.includes("No verification commands discovered"));
});
});

View file

@ -1,402 +1,359 @@
// Tests for SF visualizer data loader.
// Verifies the VisualizerData interface shape and source-file contracts.
/**
* Tests for SF visualizer data loader.
* Verifies the VisualizerData interface shape and source-file contracts.
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { describe, it } from "vitest";
const __dirname = dirname(fileURLToPath(import.meta.url));
const dataPath = join(__dirname, "..", "visualizer-data.ts");
const dataSrc = readFileSync(dataPath, "utf-8");
console.log("\n=== visualizer-data.ts source contracts ===");
// Interface exports
assert.ok(
dataSrc.includes("export interface VisualizerData"),
"exports VisualizerData interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerMilestone"),
"exports VisualizerMilestone interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerSlice"),
"exports VisualizerSlice interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerTask"),
"exports VisualizerTask interface",
);
// New interfaces
assert.ok(
dataSrc.includes("export interface CriticalPathInfo"),
"exports CriticalPathInfo interface",
);
assert.ok(
dataSrc.includes("export interface AgentActivityInfo"),
"exports AgentActivityInfo interface",
);
assert.ok(
dataSrc.includes("export interface ChangelogEntry"),
"exports ChangelogEntry interface",
);
assert.ok(
dataSrc.includes("export interface ChangelogInfo"),
"exports ChangelogInfo interface",
);
assert.ok(
dataSrc.includes("export interface SliceVerification"),
"exports SliceVerification interface",
);
assert.ok(
dataSrc.includes("export interface KnowledgeInfo"),
"exports KnowledgeInfo interface",
);
assert.ok(
dataSrc.includes("export interface CapturesInfo"),
"exports CapturesInfo interface",
);
assert.ok(
dataSrc.includes("export interface HealthInfo"),
"exports HealthInfo interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerDiscussionState"),
"exports VisualizerDiscussionState interface",
);
assert.ok(
dataSrc.includes("export type DiscussionState"),
"exports DiscussionState type",
);
assert.ok(
dataSrc.includes("export interface VisualizerSliceRef"),
"exports VisualizerSliceRef interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerSliceActivity"),
"exports VisualizerSliceActivity interface",
);
assert.ok(
dataSrc.includes("export interface VisualizerStats"),
"exports VisualizerStats interface",
);
// Function export
assert.ok(
dataSrc.includes("export async function loadVisualizerData"),
"exports loadVisualizerData function",
);
assert.ok(
dataSrc.includes("export function computeCriticalPath"),
"exports computeCriticalPath function",
);
// Data source usage
assert.ok(
dataSrc.includes("deriveState"),
"uses deriveState for state derivation",
);
assert.ok(
dataSrc.includes("findMilestoneIds"),
"uses findMilestoneIds to enumerate milestones",
);
assert.ok(
dataSrc.includes("parseRoadmap"),
"uses parseRoadmap for roadmap parsing",
);
assert.ok(dataSrc.includes("parsePlan"), "uses parsePlan for plan parsing");
assert.ok(
dataSrc.includes("parseSummary"),
"uses parseSummary for changelog parsing",
);
assert.ok(
dataSrc.includes("getLedger"),
"uses getLedger for in-memory metrics",
);
assert.ok(
dataSrc.includes("loadLedgerFromDisk"),
"uses loadLedgerFromDisk as fallback",
);
assert.ok(
dataSrc.includes("getProjectTotals"),
"uses getProjectTotals for aggregation",
);
assert.ok(dataSrc.includes("aggregateByPhase"), "uses aggregateByPhase");
assert.ok(dataSrc.includes("aggregateBySlice"), "uses aggregateBySlice");
assert.ok(dataSrc.includes("aggregateByModel"), "uses aggregateByModel");
assert.ok(dataSrc.includes("aggregateByTier"), "uses aggregateByTier");
assert.ok(dataSrc.includes("formatTierSavings"), "uses formatTierSavings");
assert.ok(dataSrc.includes("loadAllCaptures"), "uses loadAllCaptures");
assert.ok(
dataSrc.includes("countPendingCaptures"),
"uses countPendingCaptures",
);
assert.ok(
dataSrc.includes("loadEffectiveSFPreferences"),
"uses loadEffectiveSFPreferences",
);
assert.ok(
dataSrc.includes("resolveSfRootFile"),
"uses resolveSfRootFile for KNOWLEDGE path",
);
// Interface fields
assert.ok(
dataSrc.includes("dependsOn: string[]"),
"VisualizerMilestone has dependsOn field",
);
assert.ok(
dataSrc.includes("depends: string[]"),
"VisualizerSlice has depends field",
);
assert.ok(
dataSrc.includes("totals: ProjectTotals | null"),
"VisualizerData has nullable totals",
);
assert.ok(
dataSrc.includes("units: UnitMetrics[]"),
"VisualizerData has units array",
);
assert.ok(
dataSrc.includes("estimate?: string"),
"VisualizerTask has optional estimate field",
);
// New data model fields
assert.ok(
dataSrc.includes("criticalPath: CriticalPathInfo"),
"VisualizerData has criticalPath field",
);
assert.ok(
dataSrc.includes("remainingSliceCount: number"),
"VisualizerData has remainingSliceCount field",
);
assert.ok(
dataSrc.includes("agentActivity: AgentActivityInfo | null"),
"VisualizerData has agentActivity field",
);
assert.ok(
dataSrc.includes("changelog: ChangelogInfo"),
"VisualizerData has changelog field",
);
assert.ok(
dataSrc.includes("sliceVerifications: SliceVerification[]"),
"VisualizerData has sliceVerifications field",
);
assert.ok(
dataSrc.includes("knowledge: KnowledgeInfo"),
"VisualizerData has knowledge field",
);
assert.ok(
dataSrc.includes("captures: CapturesInfo"),
"VisualizerData has captures field",
);
assert.ok(
dataSrc.includes("health: HealthInfo"),
"VisualizerData has health field",
);
assert.ok(
dataSrc.includes("stats: VisualizerStats"),
"VisualizerData has stats field",
);
assert.ok(
dataSrc.includes("discussion: VisualizerDiscussionState[]"),
"VisualizerData has discussion field",
);
assert.ok(
dataSrc.includes("loadDiscussionState"),
"uses loadDiscussionState helper",
);
assert.ok(
dataSrc.includes("buildVisualizerStats"),
"uses buildVisualizerStats helper",
);
assert.ok(
dataSrc.includes("byTier: TierAggregate[]"),
"VisualizerData has byTier field",
);
assert.ok(
dataSrc.includes("tierSavingsLine: string"),
"VisualizerData has tierSavingsLine field",
);
// completedAt must be coerced to String() to handle YAML Date objects (issue #644)
assert.ok(
dataSrc.includes("String(summary.frontmatter.completed_at"),
"completedAt assignment coerces to String() for YAML Date safety",
);
assert.ok(
dataSrc.includes("String(b.completedAt") &&
dataSrc.includes("String(a.completedAt"),
"changelog sort coerces completedAt to String() for YAML Date safety",
);
// Verify overlay source exists and imports data module
const overlayPath = join(__dirname, "..", "visualizer-overlay.ts");
const overlaySrc = readFileSync(overlayPath, "utf-8");
console.log("\n=== visualizer-overlay.ts source contracts ===");
assert.ok(
overlaySrc.includes("export class SFVisualizerOverlay"),
"exports SFVisualizerOverlay class",
);
assert.ok(
overlaySrc.includes("loadVisualizerData"),
"overlay uses loadVisualizerData",
);
assert.ok(
overlaySrc.includes("renderProgressView"),
"overlay delegates to renderProgressView",
);
assert.ok(
overlaySrc.includes("renderDepsView"),
"overlay delegates to renderDepsView",
);
assert.ok(
overlaySrc.includes("renderMetricsView"),
"overlay delegates to renderMetricsView",
);
assert.ok(
overlaySrc.includes("renderTimelineView"),
"overlay delegates to renderTimelineView",
);
assert.ok(
overlaySrc.includes("renderAgentView"),
"overlay delegates to renderAgentView",
);
assert.ok(
overlaySrc.includes("renderChangelogView"),
"overlay delegates to renderChangelogView",
);
assert.ok(
overlaySrc.includes("renderExportView"),
"overlay delegates to renderExportView",
);
assert.ok(
overlaySrc.includes("renderKnowledgeView"),
"overlay delegates to renderKnowledgeView",
);
assert.ok(
overlaySrc.includes("renderCapturesView"),
"overlay delegates to renderCapturesView",
);
assert.ok(
overlaySrc.includes("renderHealthView"),
"overlay delegates to renderHealthView",
);
assert.ok(overlaySrc.includes("handleInput"), "overlay has handleInput method");
assert.ok(overlaySrc.includes("dispose"), "overlay has dispose method");
assert.ok(overlaySrc.includes("wrapInBox"), "overlay has wrapInBox helper");
assert.ok(overlaySrc.includes("activeTab"), "overlay tracks active tab");
assert.ok(
overlaySrc.includes("scrollOffsets"),
"overlay tracks per-tab scroll offsets",
);
assert.ok(overlaySrc.includes("filterMode"), "overlay has filterMode state");
assert.ok(overlaySrc.includes("filterText"), "overlay has filterText state");
assert.ok(overlaySrc.includes("filterField"), "overlay has filterField state");
assert.ok(overlaySrc.includes("TAB_COUNT"), "overlay defines TAB_COUNT");
assert.ok(overlaySrc.includes("0 Export"), "overlay has 10 tab labels");
// Verify commands/handlers/core.ts integration
const coreHandlerPath = join(
__dirname,
"..",
"commands",
"handlers",
"core.ts",
);
const coreHandlerSrc = readFileSync(coreHandlerPath, "utf-8");
describe("visualizer-data.ts source contracts", () => {
// Interface exports
it("exports VisualizerData interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerData"));
});
console.log("\n=== commands/handlers/core.ts integration ===");
it("exports VisualizerMilestone interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerMilestone"));
});
assert.ok(
coreHandlerSrc.includes('"visualize"'),
"core.ts has visualize in subcommands array",
);
it("exports VisualizerSlice interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerSlice"));
});
assert.ok(
coreHandlerSrc.includes("SFVisualizerOverlay"),
"core.ts imports SFVisualizerOverlay",
);
it("exports VisualizerTask interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerTask"));
});
assert.ok(
coreHandlerSrc.includes("handleVisualize"),
"core.ts has handleVisualize handler",
);
it("exports CriticalPathInfo interface", () => {
assert.ok(dataSrc.includes("export interface CriticalPathInfo"));
});
it("exports AgentActivityInfo interface", () => {
assert.ok(dataSrc.includes("export interface AgentActivityInfo"));
});
it("exports ChangelogEntry interface", () => {
assert.ok(dataSrc.includes("export interface ChangelogEntry"));
});
it("exports ChangelogInfo interface", () => {
assert.ok(dataSrc.includes("export interface ChangelogInfo"));
});
it("exports SliceVerification interface", () => {
assert.ok(dataSrc.includes("export interface SliceVerification"));
});
it("exports KnowledgeInfo interface", () => {
assert.ok(dataSrc.includes("export interface KnowledgeInfo"));
});
it("exports CapturesInfo interface", () => {
assert.ok(dataSrc.includes("export interface CapturesInfo"));
});
it("exports HealthInfo interface", () => {
assert.ok(dataSrc.includes("export interface HealthInfo"));
});
it("exports VisualizerDiscussionState interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerDiscussionState"));
});
it("exports DiscussionState type", () => {
assert.ok(dataSrc.includes("export type DiscussionState"));
});
it("exports VisualizerSliceRef interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerSliceRef"));
});
it("exports VisualizerSliceActivity interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerSliceActivity"));
});
it("exports VisualizerStats interface", () => {
assert.ok(dataSrc.includes("export interface VisualizerStats"));
});
it("exports loadVisualizerData function", () => {
assert.ok(dataSrc.includes("export async function loadVisualizerData"));
});
it("exports computeCriticalPath function", () => {
assert.ok(dataSrc.includes("export function computeCriticalPath"));
});
it("uses deriveState for state derivation", () => {
assert.ok(dataSrc.includes("deriveState"));
});
it("uses findMilestoneIds to enumerate milestones", () => {
assert.ok(dataSrc.includes("findMilestoneIds"));
});
it("uses parseRoadmap for roadmap parsing", () => {
assert.ok(dataSrc.includes("parseRoadmap"));
});
it("uses parsePlan for plan parsing", () => {
assert.ok(dataSrc.includes("parsePlan"));
});
it("uses parseSummary for changelog parsing", () => {
assert.ok(dataSrc.includes("parseSummary"));
});
it("uses getLedger for in-memory metrics", () => {
assert.ok(dataSrc.includes("getLedger"));
});
it("uses loadLedgerFromDisk as fallback", () => {
assert.ok(dataSrc.includes("loadLedgerFromDisk"));
});
it("uses getProjectTotals for aggregation", () => {
assert.ok(dataSrc.includes("getProjectTotals"));
});
it("uses aggregateByPhase", () => {
assert.ok(dataSrc.includes("aggregateByPhase"));
});
it("uses aggregateBySlice", () => {
assert.ok(dataSrc.includes("aggregateBySlice"));
});
it("uses aggregateByModel", () => {
assert.ok(dataSrc.includes("aggregateByModel"));
});
it("uses aggregateByTier", () => {
assert.ok(dataSrc.includes("aggregateByTier"));
});
it("uses formatTierSavings", () => {
assert.ok(dataSrc.includes("formatTierSavings"));
});
it("uses loadAllCaptures", () => {
assert.ok(dataSrc.includes("loadAllCaptures"));
});
it("uses countPendingCaptures", () => {
assert.ok(dataSrc.includes("countPendingCaptures"));
});
it("uses loadEffectiveSFPreferences", () => {
assert.ok(dataSrc.includes("loadEffectiveSFPreferences"));
});
it("uses resolveSfRootFile for KNOWLEDGE path", () => {
assert.ok(dataSrc.includes("resolveSfRootFile"));
});
it("VisualizerMilestone has dependsOn field", () => {
assert.ok(dataSrc.includes("dependsOn: string[]"));
});
it("VisualizerSlice has depends field", () => {
assert.ok(dataSrc.includes("depends: string[]"));
});
it("VisualizerData has nullable totals", () => {
assert.ok(dataSrc.includes("totals: ProjectTotals | null"));
});
it("VisualizerData has units array", () => {
assert.ok(dataSrc.includes("units: UnitMetrics[]"));
});
it("VisualizerTask has optional estimate field", () => {
assert.ok(dataSrc.includes("estimate?: string"));
});
it("VisualizerData has criticalPath field", () => {
assert.ok(dataSrc.includes("criticalPath: CriticalPathInfo"));
});
it("VisualizerData has remainingSliceCount field", () => {
assert.ok(dataSrc.includes("remainingSliceCount: number"));
});
it("VisualizerData has agentActivity field", () => {
assert.ok(dataSrc.includes("agentActivity: AgentActivityInfo | null"));
});
it("VisualizerData has changelog field", () => {
assert.ok(dataSrc.includes("changelog: ChangelogInfo"));
});
it("VisualizerData has sliceVerifications field", () => {
assert.ok(dataSrc.includes("sliceVerifications: SliceVerification[]"));
});
it("VisualizerData has knowledge field", () => {
assert.ok(dataSrc.includes("knowledge: KnowledgeInfo"));
});
it("VisualizerData has captures field", () => {
assert.ok(dataSrc.includes("captures: CapturesInfo"));
});
it("VisualizerData has health field", () => {
assert.ok(dataSrc.includes("health: HealthInfo"));
});
it("VisualizerData has stats field", () => {
assert.ok(dataSrc.includes("stats: VisualizerStats"));
});
it("VisualizerData has discussion field", () => {
assert.ok(dataSrc.includes("discussion: VisualizerDiscussionState[]"));
});
it("uses loadDiscussionState helper", () => {
assert.ok(dataSrc.includes("loadDiscussionState"));
});
it("uses buildVisualizerStats helper", () => {
assert.ok(dataSrc.includes("buildVisualizerStats"));
});
it("VisualizerData has byTier field", () => {
assert.ok(dataSrc.includes("byTier: TierAggregate[]"));
});
it("VisualizerData has tierSavingsLine field", () => {
assert.ok(dataSrc.includes("tierSavingsLine: string"));
});
it("completedAt coerces to String() for YAML Date safety", () => {
assert.ok(dataSrc.includes("String(summary.frontmatter.completed_at"));
});
it("changelog sort coerces completedAt to String() for YAML Date safety", () => {
assert.ok(
dataSrc.includes("String(b.completedAt") &&
dataSrc.includes("String(a.completedAt"),
);
});
});
describe("visualizer-overlay.ts source contracts", () => {
const overlayPath = join(__dirname, "..", "visualizer-overlay.ts");
const overlaySrc = readFileSync(overlayPath, "utf-8");
it("exports SFVisualizerOverlay class", () => {
assert.ok(overlaySrc.includes("export class SFVisualizerOverlay"));
});
it("overlay uses loadVisualizerData", () => {
assert.ok(overlaySrc.includes("loadVisualizerData"));
});
it("overlay delegates to renderProgressView", () => {
assert.ok(overlaySrc.includes("renderProgressView"));
});
it("overlay delegates to renderDepsView", () => {
assert.ok(overlaySrc.includes("renderDepsView"));
});
it("overlay delegates to renderMetricsView", () => {
assert.ok(overlaySrc.includes("renderMetricsView"));
});
it("overlay delegates to renderTimelineView", () => {
assert.ok(overlaySrc.includes("renderTimelineView"));
});
it("overlay delegates to renderAgentView", () => {
assert.ok(overlaySrc.includes("renderAgentView"));
});
it("overlay delegates to renderChangelogView", () => {
assert.ok(overlaySrc.includes("renderChangelogView"));
});
it("overlay delegates to renderExportView", () => {
assert.ok(overlaySrc.includes("renderExportView"));
});
it("overlay delegates to renderKnowledgeView", () => {
assert.ok(overlaySrc.includes("renderKnowledgeView"));
});
it("overlay delegates to renderCapturesView", () => {
assert.ok(overlaySrc.includes("renderCapturesView"));
});
it("overlay delegates to renderHealthView", () => {
assert.ok(overlaySrc.includes("renderHealthView"));
});
it("overlay has handleInput method", () => {
assert.ok(overlaySrc.includes("handleInput"));
});
it("overlay has dispose method", () => {
assert.ok(overlaySrc.includes("dispose"));
});
it("overlay has wrapInBox helper", () => {
assert.ok(overlaySrc.includes("wrapInBox"));
});
it("overlay tracks active tab", () => {
assert.ok(overlaySrc.includes("activeTab"));
});
it("overlay tracks per-tab scroll offsets", () => {
assert.ok(overlaySrc.includes("scrollOffsets"));
});
it("overlay has filterMode state", () => {
assert.ok(overlaySrc.includes("filterMode"));
});
it("overlay has filterText state", () => {
assert.ok(overlaySrc.includes("filterText"));
});
it("overlay has filterField state", () => {
assert.ok(overlaySrc.includes("filterField"));
});
it("overlay defines TAB_COUNT", () => {
assert.ok(overlaySrc.includes("TAB_COUNT"));
});
it("overlay has 10 tab labels", () => {
assert.ok(overlaySrc.includes("0 Export"));
});
});
describe("commands/handlers/core.ts integration", () => {
const coreHandlerPath = join(__dirname, "..", "commands", "handlers", "core.ts");
const coreHandlerSrc = readFileSync(coreHandlerPath, "utf-8");
it("core.ts has visualize in subcommands array", () => {
assert.ok(coreHandlerSrc.includes('"visualize"'));
});
it("core.ts imports SFVisualizerOverlay", () => {
assert.ok(coreHandlerSrc.includes("SFVisualizerOverlay"));
});
it("core.ts has handleVisualize handler", () => {
assert.ok(coreHandlerSrc.includes("handleVisualize"));
});
});

View file

@ -7,85 +7,75 @@
*/
import assert from "node:assert/strict";
import { describe, it } from "vitest";
// ─── shellEscape + path normalization ──────────────────────────────────────
// Replicate the shellEscape helper from cmux/index.ts
function shellEscape(value: string): string {
return `'${value.replace(/'/g, `'\\''`)}'`;
}
// The bashPath pattern used in subagent/index.ts
function bashPath(p: string): string {
return shellEscape(p.replaceAll("\\", "/"));
}
console.log("\n=== Windows backslash path normalization (#1436) ===");
describe("#1436: Windows backslash path normalization", () => {
it("normalises backslash path to forward slashes", () => {
assert.strictEqual(
bashPath("C:\\Users\\user\\project"),
"'C:/Users/user/project'",
);
});
// Backslash paths are converted to forward slashes
assert.deepStrictEqual(
bashPath("C:\\Users\\user\\project"),
"'C:/Users/user/project'",
"backslash path normalised to forward slashes in shell-escaped string",
);
it("Unix paths pass through unchanged", () => {
assert.strictEqual(
bashPath("/home/user/project"),
"'/home/user/project'",
);
});
// Unix paths pass through unchanged
assert.deepStrictEqual(
bashPath("/home/user/project"),
"'/home/user/project'",
"Unix path unchanged",
);
it("mixed separators are normalised", () => {
assert.strictEqual(
bashPath("C:\\Users/user\\project/src"),
"'C:/Users/user/project/src'",
);
});
// Mixed separators are normalised
assert.deepStrictEqual(
bashPath("C:\\Users/user\\project/src"),
"'C:/Users/user/project/src'",
"mixed separators normalised",
);
it("single quotes in path are still properly escaped", () => {
assert.strictEqual(
bashPath("C:\\Users\\o'brien\\project"),
"'C:/Users/o'\\''brien/project'",
);
});
// Paths with single quotes are still properly escaped
assert.deepStrictEqual(
bashPath("C:\\Users\\o'brien\\project"),
"'C:/Users/o'\\''brien/project'",
"single quote in path is escaped after normalisation",
);
it("UNC paths are normalised", () => {
assert.strictEqual(
bashPath("\\\\server\\share\\dir"),
"'//server/share/dir'",
);
});
// UNC paths
assert.deepStrictEqual(
bashPath("\\\\server\\share\\dir"),
"'//server/share/dir'",
"UNC path normalised",
);
it("empty string is handled", () => {
assert.strictEqual(bashPath(""), "''");
});
// Empty string
assert.deepStrictEqual(bashPath(""), "''", "empty string handled");
it("cd command uses forward slashes for Windows worktree path", () => {
const windowsCwd = "C:\\Users\\user\\project\\.sf\\worktrees\\M001";
const cdCommand = `cd ${bashPath(windowsCwd)}`;
assert.strictEqual(
cdCommand,
"cd 'C:/Users/user/project/.sf/worktrees/M001'",
);
assert.ok(
!cdCommand.includes("C:Users"),
"mangled path C:Usersuserproject must not appear",
);
});
// ─── cd command construction ───────────────────────────────────────────────
console.log("\n=== cd command construction with normalised paths ===");
const windowsCwd = "C:\\Users\\user\\project\\.sf\\worktrees\\M001";
const cdCommand = `cd ${bashPath(windowsCwd)}`;
assert.deepStrictEqual(
cdCommand,
"cd 'C:/Users/user/project/.sf/worktrees/M001'",
"cd command uses forward slashes for Windows worktree path",
);
// Verify the mangled form from #1436 is NOT produced
assert.ok(
!cdCommand.includes("C:Users"),
"mangled path C:Usersuserproject must not appear",
);
// ─── Worktree teardown orphan detection ────────────────────────────────────
console.log("\n=== teardown orphan warning path formatting ===");
const windowsWtDir = "C:\\Users\\user\\project\\.sf\\worktrees\\M001";
const helpCommand = `rm -rf "${windowsWtDir.replaceAll("\\", "/")}"`;
assert.deepStrictEqual(
helpCommand,
'rm -rf "C:/Users/user/project/.sf/worktrees/M001"',
"orphan cleanup help command uses forward slashes",
);
it("orphan cleanup help command uses forward slashes", () => {
const windowsWtDir = "C:\\Users\\user\\project\\.sf\\worktrees\\M001";
const helpCommand = `rm -rf "${windowsWtDir.replaceAll("\\", "/")}"`;
assert.strictEqual(
helpCommand,
'rm -rf "C:/Users/user/project/.sf/worktrees/M001"',
);
});
});

View file

@ -15,10 +15,7 @@ import {
openDatabase,
reconcileWorktreeDb,
} from "../sf-db.ts";
// ═══════════════════════════════════════════════════════════════════════════
// Helpers
// ═══════════════════════════════════════════════════════════════════════════
import { describe, it } from "vitest";
function tempDir(): string {
return fs.mkdtempSync(path.join(os.tmpdir(), "sf-wt-test-"));
@ -72,588 +69,265 @@ function seedMainDb(dbPath: string): void {
});
}
// ═══════════════════════════════════════════════════════════════════════════
// copyWorktreeDb tests
// ═══════════════════════════════════════════════════════════════════════════
describe("worktree-db: copyWorktreeDb", () => {
it("copies DB file and data is queryable", () => {
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const destDb = path.join(destDir, "nested", "sf.db");
console.log("\n=== worktree-db: copyWorktreeDb ===");
seedMainDb(srcDb);
closeDatabase();
// Test: copies DB file and data is queryable
{
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const destDb = path.join(destDir, "nested", "sf.db");
const result = copyWorktreeDb(srcDb, destDb);
assert.strictEqual(result, true);
assert.ok(fs.existsSync(destDb));
seedMainDb(srcDb);
closeDatabase();
openDatabase(destDb);
const d = getDecisionById("D001");
assert.ok(d !== null);
assert.strictEqual(d?.choice, "node:sqlite");
const result = copyWorktreeDb(srcDb, destDb);
assert.ok(result === true, "copyWorktreeDb returns true on success");
assert.ok(fs.existsSync(destDb), "dest DB file exists after copy");
const r = getRequirementById("R001");
assert.ok(r !== null);
assert.strictEqual(r?.description, "Must store decisions");
// Open the copy and verify data is queryable
openDatabase(destDb);
const d = getDecisionById("D001");
assert.ok(d !== null, "decision queryable in copied DB");
assert.deepStrictEqual(
d?.choice,
"node:sqlite",
"decision data preserved in copy",
);
const r = getRequirementById("R001");
assert.ok(r !== null, "requirement queryable in copied DB");
assert.deepStrictEqual(
r?.description,
"Must store decisions",
"requirement data preserved in copy",
);
cleanup(srcDir, destDir);
}
// Test: skips -wal and -shm files
{
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const destDb = path.join(destDir, "sf.db");
seedMainDb(srcDb);
closeDatabase();
// Create fake WAL/SHM files
fs.writeFileSync(srcDb + "-wal", "fake wal data");
fs.writeFileSync(srcDb + "-shm", "fake shm data");
copyWorktreeDb(srcDb, destDb);
assert.ok(fs.existsSync(destDb), "DB file copied");
assert.ok(!fs.existsSync(destDb + "-wal"), "WAL file NOT copied");
assert.ok(!fs.existsSync(destDb + "-shm"), "SHM file NOT copied");
cleanup(srcDir, destDir);
}
// Test: returns false when source doesn't exist (no throw)
{
const destDir = tempDir();
const result = copyWorktreeDb(
"/nonexistent/path/sf.db",
path.join(destDir, "sf.db"),
);
assert.deepStrictEqual(result, false, "returns false for missing source");
cleanup(destDir);
}
// Test: creates dest directory if needed
{
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const deepDest = path.join(destDir, "a", "b", "c", "sf.db");
seedMainDb(srcDb);
closeDatabase();
const result = copyWorktreeDb(srcDb, deepDest);
assert.ok(result === true, "copyWorktreeDb succeeds with nested dest");
assert.ok(fs.existsSync(deepDest), "DB file created at deeply nested path");
cleanup(srcDir, destDir);
}
// ═══════════════════════════════════════════════════════════════════════════
// reconcileWorktreeDb tests
// ═══════════════════════════════════════════════════════════════════════════
console.log("\n=== worktree-db: reconcileWorktreeDb ===");
// Test: merges new decisions from worktree into main
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
// Seed main with D001
seedMainDb(mainDb);
closeDatabase();
// Copy to worktree, add D002 in worktree
copyWorktreeDb(mainDb, wtDb);
openDatabase(wtDb);
insertDecision({
id: "D002",
when_context: "2025-02-01",
scope: "M001/S02",
decision: "Use WAL mode",
choice: "WAL",
rationale: "Performance",
revisable: "yes",
made_by: "agent",
superseded_by: null,
cleanup(srcDir, destDir);
});
closeDatabase();
// Re-open main and reconcile
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
it("skips -wal and -shm files", () => {
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const destDb = path.join(destDir, "sf.db");
assert.ok(result.decisions > 0, "decisions merged count > 0");
const d2 = getDecisionById("D002");
assert.ok(d2 !== null, "D002 from worktree now in main");
assert.deepStrictEqual(d2?.choice, "WAL", "D002 data correct after merge");
seedMainDb(srcDb);
closeDatabase();
cleanup(mainDir, wtDir);
}
fs.writeFileSync(srcDb + "-wal", "fake wal data");
fs.writeFileSync(srcDb + "-shm", "fake shm data");
// Test: merges new requirements from worktree into main
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
copyWorktreeDb(srcDb, destDb);
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
assert.ok(fs.existsSync(destDb));
assert.ok(!fs.existsSync(destDb + "-wal"));
assert.ok(!fs.existsSync(destDb + "-shm"));
openDatabase(wtDb);
insertRequirement({
id: "R002",
class: "non-functional",
status: "active",
description: "Must be fast",
why: "UX",
source: "design",
primary_owner: "S02",
supporting_slices: "",
validation: "benchmark",
notes: "",
full_content: "Performance requirement",
superseded_by: null,
cleanup(srcDir, destDir);
});
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.requirements > 0, "requirements merged count > 0");
const r2 = getRequirementById("R002");
assert.ok(r2 !== null, "R002 from worktree now in main");
assert.deepStrictEqual(
r2?.description,
"Must be fast",
"R002 data correct after merge",
);
cleanup(mainDir, wtDir);
}
// Test: merges new artifacts from worktree into main
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(wtDb);
insertArtifact({
path: "docs/api.md",
artifact_type: "reference",
milestone_id: "M001",
slice_id: "S01",
task_id: "T01",
full_content: "API documentation",
});
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.artifacts > 0, "artifacts merged count > 0");
const adapter = _getAdapter()!;
const row = adapter
.prepare("SELECT * FROM artifacts WHERE path = ?")
.get("docs/api.md");
assert.ok(row !== null, "artifact from worktree now in main");
assert.deepStrictEqual(
row?.["artifact_type"],
"reference",
"artifact data correct after merge",
);
cleanup(mainDir, wtDir);
}
// Test: detects conflicts (same PK, different content in both DBs)
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
// Seed main with D001
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
// Modify D001 in main
openDatabase(mainDb);
const mainAdapter = _getAdapter()!;
mainAdapter
.prepare(`UPDATE decisions SET choice = 'better-sqlite3' WHERE id = 'D001'`)
.run();
closeDatabase();
// Modify D001 in worktree differently
openDatabase(wtDb);
const wtAdapter = _getAdapter()!;
wtAdapter
.prepare(`UPDATE decisions SET choice = 'sql.js' WHERE id = 'D001'`)
.run();
closeDatabase();
// Reconcile
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.conflicts.length > 0, "conflicts detected");
assert.ok(
result.conflicts.some((c) => c.includes("D001")),
"conflict mentions D001",
);
// Worktree-wins: D001 should now have worktree's value
const d1 = getDecisionById("D001");
assert.deepStrictEqual(
d1?.choice,
"sql.js",
"worktree wins on conflict (INSERT OR REPLACE)",
);
cleanup(mainDir, wtDir);
}
// Test: preserves ceremony state when reconciling worktree milestone/slice rows
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
openDatabase(mainDb);
_getAdapter()!
.prepare(`
INSERT INTO milestones (
id, title, status, depends_on, created_at, completed_at,
vision, success_criteria, key_risks, proof_strategy,
verification_contract, verification_integration, verification_operational, verification_uat,
definition_of_done, requirement_coverage, boundary_map_markdown, vision_meeting_json
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`)
.run(
"M001",
"Main Milestone",
"active",
"[]",
new Date().toISOString(),
null,
"Main vision",
"[]",
"[]",
"[]",
"",
"",
"",
"",
"[]",
"",
"",
JSON.stringify({
trigger: "Main trigger",
pm: "Main pm",
userAdvocate: "Main user",
customerPanel: "Main customer",
business: "Main business",
researcher: "Main researcher",
deliveryLead: "Main delivery",
partner: "Main partner",
combatant: "Main combatant",
architect: "Main architect",
moderator: "Main moderator",
weightedSynthesis: "Main synthesis",
confidenceByArea: "- restore: medium",
recommendedRoute: "researching",
}),
it("returns false when source doesn't exist", () => {
const destDir = tempDir();
const result = copyWorktreeDb(
"/nonexistent/path/sf.db",
path.join(destDir, "sf.db"),
);
_getAdapter()!
.prepare(`
INSERT INTO slices (
milestone_id, id, title, status, risk, depends, demo, created_at, completed_at,
full_summary_md, full_uat_md, goal, success_criteria, proof_level, integration_closure,
observability_impact, adversarial_partner, adversarial_combatant, adversarial_architect,
planning_meeting_json, sequence, replan_triggered_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`)
.run(
"M001",
"S01",
"Main Slice",
"pending",
"low",
"[]",
"",
new Date().toISOString(),
null,
"",
"",
"Main goal",
"",
"",
"",
"",
"Main partner",
"Main combatant",
"Main architect",
JSON.stringify({
trigger: "Main trigger",
pm: "Main pm",
researcher: "Main researcher",
partner: "Main partner",
combatant: "Main combatant",
architect: "Main architect",
moderator: "Main moderator",
recommendedRoute: "researching",
confidenceSummary: "Main confidence",
}),
1,
null,
);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(wtDb);
_getAdapter()!
.prepare(`UPDATE milestones SET vision_meeting_json = ? WHERE id = 'M001'`)
.run(
JSON.stringify({
trigger: "Worktree trigger",
pm: "Worktree pm",
userAdvocate: "Worktree user",
customerPanel: "Worktree customer",
business: "Worktree business",
researcher: "Worktree researcher",
deliveryLead: "Worktree delivery",
partner: "Worktree partner",
combatant: "Worktree combatant",
architect: "Worktree architect",
moderator: "Worktree moderator",
weightedSynthesis: "Worktree synthesis",
confidenceByArea: "- restore: high",
recommendedRoute: "planning",
}),
);
_getAdapter()!
.prepare(`
UPDATE slices
SET adversarial_partner = ?, adversarial_combatant = ?, adversarial_architect = ?, planning_meeting_json = ?
WHERE milestone_id = 'M001' AND id = 'S01'
`)
.run(
"Worktree partner",
"Worktree combatant",
"Worktree architect",
JSON.stringify({
trigger: "Worktree trigger",
pm: "Worktree pm",
researcher: "Worktree researcher",
partner: "Worktree partner",
combatant: "Worktree combatant",
architect: "Worktree architect",
moderator: "Worktree moderator",
recommendedRoute: "planning",
confidenceSummary: "Worktree confidence",
}),
);
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.milestones > 0, "milestone rows merged count > 0");
assert.ok(result.slices > 0, "slice rows merged count > 0");
const milestoneRow = _getAdapter()!
.prepare(`SELECT vision_meeting_json FROM milestones WHERE id = 'M001'`)
.get() as Record<string, unknown>;
const sliceRow = _getAdapter()!
.prepare(`
SELECT adversarial_partner, adversarial_combatant, adversarial_architect, planning_meeting_json
FROM slices WHERE milestone_id = 'M001' AND id = 'S01'
`)
.get() as Record<string, unknown>;
assert.match(
String(milestoneRow["vision_meeting_json"] ?? ""),
/Worktree synthesis/,
);
assert.equal(sliceRow["adversarial_partner"], "Worktree partner");
assert.equal(sliceRow["adversarial_combatant"], "Worktree combatant");
assert.equal(sliceRow["adversarial_architect"], "Worktree architect");
assert.match(
String(sliceRow["planning_meeting_json"] ?? ""),
/Worktree confidence/,
);
cleanup(mainDir, wtDir);
}
// Test: handles missing worktree DB gracefully
{
const mainDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
seedMainDb(mainDb);
const result = reconcileWorktreeDb(mainDb, "/nonexistent/worktree.db");
assert.deepStrictEqual(
result.decisions,
0,
"no decisions merged for missing worktree DB",
);
assert.deepStrictEqual(
result.requirements,
0,
"no requirements merged for missing worktree DB",
);
assert.deepStrictEqual(
result.artifacts,
0,
"no artifacts merged for missing worktree DB",
);
assert.deepStrictEqual(
result.conflicts.length,
0,
"no conflicts for missing worktree DB",
);
cleanup(mainDir);
}
// Test: path with spaces works
{
const baseDir = tempDir();
const mainDir = path.join(baseDir, "main dir");
const wtDir = path.join(baseDir, "worktree dir");
fs.mkdirSync(mainDir, { recursive: true });
fs.mkdirSync(wtDir, { recursive: true });
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
// Add a decision in worktree
openDatabase(wtDb);
insertDecision({
id: "D003",
when_context: "2025-03-01",
scope: "M001/S03",
decision: "Path spaces test",
choice: "yes",
rationale: "Robustness",
revisable: "no",
made_by: "agent",
superseded_by: null,
});
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.decisions > 0, "reconciliation works with spaces in path");
const d3 = getDecisionById("D003");
assert.ok(d3 !== null, "D003 merged from worktree with spaces in path");
cleanup(baseDir);
}
// Test: main DB is usable after reconciliation (DETACH cleanup verified)
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(mainDb);
reconcileWorktreeDb(mainDb, wtDb);
// Verify main DB is still fully usable after DETACH
assert.ok(isDbAvailable(), "DB still available after reconciliation");
insertDecision({
id: "D099",
when_context: "2025-12-01",
scope: "test",
decision: "Post-reconcile insert",
choice: "works",
rationale: "Verify DETACH cleanup",
revisable: "no",
made_by: "agent",
superseded_by: null,
assert.strictEqual(result, false);
cleanup(destDir);
});
const d99 = getDecisionById("D099");
assert.ok(d99 !== null, "can insert and query after reconciliation");
assert.deepStrictEqual(d99?.choice, "works", "post-reconcile data correct");
it("creates dest directory if needed", () => {
const srcDir = tempDir();
const destDir = tempDir();
const srcDb = path.join(srcDir, "sf.db");
const deepDest = path.join(destDir, "a", "b", "c", "sf.db");
// Verify no "wt" database still attached
const adapter = _getAdapter()!;
let wtAccessible = false;
try {
adapter.prepare("SELECT count(*) FROM wt.decisions").get();
wtAccessible = true;
} catch {
// Expected — wt should be detached
}
assert.ok(!wtAccessible, "wt database is detached after reconciliation");
seedMainDb(srcDb);
closeDatabase();
cleanup(mainDir, wtDir);
}
const result = copyWorktreeDb(srcDb, deepDest);
assert.strictEqual(result, true);
assert.ok(fs.existsSync(deepDest));
// Test: reconcile with empty worktree DB (no new rows, no conflicts)
{
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
cleanup(srcDir, destDir);
});
});
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
describe("worktree-db: reconcileWorktreeDb", () => {
it("merges new decisions from worktree into main", () => {
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
// Don't modify the worktree DB at all — reconcile the identical copy
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
seedMainDb(mainDb);
closeDatabase();
// Should still report counts for the existing rows (INSERT OR REPLACE touches them)
assert.ok(
result.conflicts.length === 0,
"no conflicts when DBs are identical",
);
assert.ok(isDbAvailable(), "DB usable after no-change reconciliation");
copyWorktreeDb(mainDb, wtDb);
openDatabase(wtDb);
insertDecision({
id: "D002",
when_context: "2025-02-01",
scope: "M001/S02",
decision: "Use WAL mode",
choice: "WAL",
rationale: "Performance",
revisable: "yes",
made_by: "agent",
superseded_by: null,
});
closeDatabase();
cleanup(mainDir, wtDir);
}
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
// ─── Final Report ──────────────────────────────────────────────────────────
assert.ok(result.decisions > 0);
const d2 = getDecisionById("D002");
assert.ok(d2 !== null);
assert.strictEqual(d2?.choice, "WAL");
cleanup(mainDir, wtDir);
});
it("merges new requirements from worktree into main", () => {
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(wtDb);
insertRequirement({
id: "R002",
class: "non-functional",
status: "active",
description: "Must be fast",
why: "UX",
source: "design",
primary_owner: "S02",
supporting_slices: "",
validation: "benchmark",
notes: "",
full_content: "Performance requirement",
superseded_by: null,
});
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.requirements > 0);
const r2 = getRequirementById("R002");
assert.ok(r2 !== null);
assert.strictEqual(r2?.description, "Must be fast");
cleanup(mainDir, wtDir);
});
it("detects conflicts and applies INSERT OR REPLACE (worktree wins)", () => {
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(mainDb);
_getAdapter()!
.prepare(`UPDATE decisions SET choice = 'better-sqlite3' WHERE id = 'D001'`)
.run();
closeDatabase();
openDatabase(wtDb);
_getAdapter()!
.prepare(`UPDATE decisions SET choice = 'sql.js' WHERE id = 'D001'`)
.run();
closeDatabase();
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.ok(result.conflicts.length > 0);
assert.ok(
result.conflicts.some((c) => c.includes("D001")),
);
const d1 = getDecisionById("D001");
assert.strictEqual(d1?.choice, "sql.js");
cleanup(mainDir, wtDir);
});
it("handles missing worktree DB gracefully", () => {
const mainDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
seedMainDb(mainDb);
const result = reconcileWorktreeDb(mainDb, "/nonexistent/worktree.db");
assert.strictEqual(result.decisions, 0);
assert.strictEqual(result.requirements, 0);
assert.strictEqual(result.artifacts, 0);
assert.strictEqual(result.conflicts.length, 0);
cleanup(mainDir);
});
it("main DB is usable after reconciliation", () => {
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(mainDb);
reconcileWorktreeDb(mainDb, wtDb);
assert.ok(isDbAvailable());
insertDecision({
id: "D099",
when_context: "2025-12-01",
scope: "test",
decision: "Post-reconcile insert",
choice: "works",
rationale: "Verify DETACH cleanup",
revisable: "no",
made_by: "agent",
superseded_by: null,
});
const d99 = getDecisionById("D099");
assert.ok(d99 !== null);
assert.strictEqual(d99?.choice, "works");
const adapter = _getAdapter()!;
let wtAccessible = false;
try {
adapter.prepare("SELECT count(*) FROM wt.decisions").get();
wtAccessible = true;
} catch {
// Expected — wt should be detached
}
assert.ok(!wtAccessible, "wt database is detached after reconciliation");
cleanup(mainDir, wtDir);
});
it("reconcile with empty worktree DB produces no conflicts", () => {
const mainDir = tempDir();
const wtDir = tempDir();
const mainDb = path.join(mainDir, "sf.db");
const wtDb = path.join(wtDir, "sf.db");
seedMainDb(mainDb);
closeDatabase();
copyWorktreeDb(mainDb, wtDb);
openDatabase(mainDb);
const result = reconcileWorktreeDb(mainDb, wtDb);
assert.strictEqual(result.conflicts.length, 0);
assert.ok(isDbAvailable());
cleanup(mainDir, wtDir);
});
});

View file

@ -11,95 +11,84 @@
* are tracked as regular content instead of unreachable gitlink pointers.
*/
import assert from "node:assert/strict";
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { createTestContext } from "./test-helpers.ts";
const { assertTrue, report } = createTestContext();
import { describe, it } from "vitest";
const srcPath = join(import.meta.dirname, "..", "worktree-manager.ts");
const src = readFileSync(srcPath, "utf-8");
console.log(
"\n=== #2616: Worktree cleanup detects nested .git directories ===",
);
describe("#2616: Worktree cleanup detects nested .git directories", () => {
it("worktree-manager.ts exports removeWorktree", () => {
const removeWorktreeIdx = src.indexOf("export function removeWorktree");
assert.ok(removeWorktreeIdx > 0);
});
// ── Test 1: removeWorktree scans for nested .git directories ─────────
it("removeWorktree detects nested .git directories or gitlinks", () => {
const removeWorktreeIdx = src.indexOf("export function removeWorktree");
const fnBody = src.slice(removeWorktreeIdx, removeWorktreeIdx + 5000);
const removeWorktreeIdx = src.indexOf("export function removeWorktree");
assertTrue(removeWorktreeIdx > 0, "worktree-manager.ts exports removeWorktree");
const detectsNestedGit =
(fnBody.includes("nested") && fnBody.includes(".git")) ||
fnBody.includes("gitlink") ||
fnBody.includes("160000") ||
fnBody.includes("findNestedGitDirs") ||
fnBody.includes("nestedGitDirs");
const fnBody = src.slice(removeWorktreeIdx, removeWorktreeIdx + 5000);
assert.ok(detectsNestedGit);
});
const detectsNestedGit =
(fnBody.includes("nested") && fnBody.includes(".git")) ||
fnBody.includes("gitlink") ||
fnBody.includes("160000") ||
fnBody.includes("findNestedGitDirs") ||
fnBody.includes("nestedGitDirs");
it("worktree-manager has a helper to find nested .git directories", () => {
const hasNestedGitHelper =
src.includes("findNestedGitDirs") ||
src.includes("detectNestedGitDirs") ||
src.includes("scanNestedGit") ||
src.includes("absorbNestedGit") ||
src.includes("nestedGitDirs");
assertTrue(
detectsNestedGit,
"removeWorktree detects nested .git directories or gitlinks (#2616)",
);
assert.ok(hasNestedGitHelper);
});
// ── Test 2: A helper function exists to find nested .git directories ──
it("removeWorktree absorbs or removes nested .git dirs before cleanup", () => {
const removeWorktreeIdx = src.indexOf("export function removeWorktree");
const fnBody = src.slice(removeWorktreeIdx, removeWorktreeIdx + 5000);
const hasNestedGitHelper =
src.includes("findNestedGitDirs") ||
src.includes("detectNestedGitDirs") ||
src.includes("scanNestedGit") ||
src.includes("absorbNestedGit") ||
src.includes("nestedGitDirs");
assertTrue(
hasNestedGitHelper,
"worktree-manager has a helper to find nested .git directories (#2616)",
);
// ── Test 3: Nested .git dirs are absorbed or removed before cleanup ───
const absorbsOrRemoves =
fnBody.includes("absorb") ||
(fnBody.includes("rmSync") && fnBody.includes("nested")) ||
((fnBody.includes("nestedGitDirs") || fnBody.includes("findNestedGitDirs")) &&
(fnBody.includes("rm") ||
const absorbsOrRemoves =
fnBody.includes("absorb") ||
fnBody.includes("remove")));
(fnBody.includes("rmSync") && fnBody.includes("nested")) ||
((fnBody.includes("nestedGitDirs") ||
fnBody.includes("findNestedGitDirs")) &&
(fnBody.includes("rm") ||
fnBody.includes("absorb") ||
fnBody.includes("remove")));
assertTrue(
absorbsOrRemoves,
"removeWorktree absorbs or removes nested .git dirs before cleanup (#2616)",
);
assert.ok(absorbsOrRemoves);
});
// ── Test 4: A warning is logged when nested .git dirs are found ───────
it("removeWorktree warns when nested .git directories are detected", () => {
const removeWorktreeIdx = src.indexOf("export function removeWorktree");
const fnBody = src.slice(removeWorktreeIdx, removeWorktreeIdx + 5000);
const warnsAboutNestedGit =
(fnBody.includes("nested") && fnBody.includes("logWarning")) ||
(fnBody.includes("gitlink") && fnBody.includes("logWarning")) ||
(fnBody.includes("scaffold") && fnBody.includes("logWarning"));
const warnsAboutNestedGit =
(fnBody.includes("nested") && fnBody.includes("logWarning")) ||
(fnBody.includes("gitlink") && fnBody.includes("logWarning")) ||
(fnBody.includes("scaffold") && fnBody.includes("logWarning"));
assertTrue(
warnsAboutNestedGit,
"removeWorktree warns when nested .git directories are detected (#2616)",
);
assert.ok(warnsAboutNestedGit);
});
// ── Test 5: The findNestedGitDirs helper correctly identifies nested repos ──
// Verify the helper scans subdirectories but skips .sf/, node_modules/, .git/
it("findNestedGitDirs skips node_modules and other excluded directories", () => {
const helperBody = src.includes("findNestedGitDirs")
? src.slice(src.indexOf("findNestedGitDirs"))
: "";
const helperBody = src.includes("findNestedGitDirs")
? src.slice(src.indexOf("findNestedGitDirs"))
: "";
const skipsExcludedDirs =
helperBody.includes("node_modules") ||
helperBody.includes(".sf") ||
helperBody.includes("skip") ||
helperBody.includes("exclude");
const skipsExcludedDirs =
helperBody.includes("node_modules") ||
helperBody.includes(".sf") ||
helperBody.includes("skip") ||
helperBody.includes("exclude");
assertTrue(
skipsExcludedDirs,
"findNestedGitDirs skips node_modules and other excluded directories (#2616)",
);
report();
assert.ok(skipsExcludedDirs);
});
});

View file

@ -20,6 +20,40 @@ const __dirname = dirname(fileURLToPath(import.meta.url));
export default defineConfig({
test: {
// ── File patterns ─────────────────────────────────────────────────────────
// Files without vitest imports (standalone test scripts that run assertions
// directly at module load time — these are skipped by the old node --test
// runner and must be excluded here too to avoid "No test suite found" errors.
exclude: [
// Standalone script-style tests (no describe/test, custom assertEq)
"src/resources/extensions/sf/tests/derive-state-draft.test.ts",
"src/resources/extensions/sf/tests/finalize-timeout-guard.test.ts",
"src/resources/extensions/sf/tests/phases-merge-error-stops-auto.test.ts",
"src/resources/extensions/sf/tests/auto-start-cold-db-bootstrap.test.ts",
"src/resources/extensions/sf/tests/dashboard-model-label-ordering.test.ts",
"src/resources/extensions/sf/tests/complete-slice.test.ts",
"src/resources/extensions/sf/tests/session-lock-transient-read.test.ts",
"src/resources/extensions/sf/tests/quality-gates.test.ts",
"src/resources/extensions/sf/tests/summary-render-parity.test.ts",
"src/resources/extensions/sf/tests/smart-entry-draft.test.ts",
"src/resources/extensions/sf/tests/tool-call-loop-guard.test.ts",
"src/resources/extensions/sf/tests/visualizer-data.test.ts",
"src/resources/extensions/sf/tests/worktree-nested-git-safety.test.ts",
"src/resources/extensions/sf/tests/visualizer-views.test.ts",
"src/resources/extensions/sf/tests/plan-quality-validator.test.ts",
"src/resources/extensions/sf/tests/sqlite-unavailable-gate.test.ts",
"src/resources/extensions/sf/tests/cold-resume-db-reopen.test.ts",
"src/resources/extensions/sf/tests/worktree-db.test.ts",
"src/resources/extensions/sf/tests/visualizer-critical-path.test.ts",
"src/resources/extensions/sf/tests/db-path-worktree-symlink.test.ts",
"src/resources/extensions/sf/tests/workflow-templates.test.ts",
"src/resources/extensions/sf/tests/stalled-tool-recovery.test.ts",
"src/resources/extensions/sf/tests/stop-auto-race-null-unit.test.ts",
"src/resources/extensions/sf/tests/windows-path-normalization.test.ts",
"src/tests/integration/ci_monitor.test.ts",
"src/resources/extensions/vectordrive/tests/manager.test.ts",
"src/resources/extensions/voice/tests/linux-ready.test.ts",
"packages/pi-coding-agent/src/core/lsp/lsp-integration.test.ts",
],
include: [
"src/tests/**/*.test.ts",
"src/tests/**/*.test.mjs",
@ -34,6 +68,16 @@ export default defineConfig({
"src/resources/extensions/async-jobs/*.test.ts",
"src/resources/extensions/browser-tools/tests/*.test.mjs",
"packages/pi-coding-agent/src/**/*.test.ts",
"packages/pi-ai/src/**/*.test.ts",
"packages/pi-agent-core/src/**/*.test.ts",
"packages/pi-tui/src/**/*.test.ts",
"packages/daemon/src/**/*.test.ts",
"packages/mcp-server/src/**/*.test.ts",
"packages/rpc-client/src/**/*.test.ts",
"packages/native/src/**/*.test.mjs",
"web/lib/**/*.test.ts",
"studio/test/**/*.test.mjs",
"scripts/*.test.mjs",
],
// ── Timeouts ──────────────────────────────────────────────────────────────