test: final vitest API migration fixes across all packages and extensions

This commit is contained in:
Mikael Hugo 2026-05-02 04:49:34 +02:00
parent 5cf94c296e
commit e44237e526
33 changed files with 102 additions and 108 deletions

View file

@ -21,26 +21,20 @@ loaded from `~/.sf/agent/extensions/sf/` (compiled JS) are **not** redirected.
## Running tests
**Use the lightweight `--test` runner, not `npm run test:coverage`.**
The coverage runner (`c8` + `--cpu-prof` + `--heap-prof`) spawns 10–15 heavy
worker processes per invocation. If a background run is killed or times out,
those workers are left alive, saturating all CPUs (~700% observed).
**Use vitest — no pre-compilation step needed.**
```bash
# Run a specific test file (fast, no coverage overhead):
node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs \
--experimental-strip-types \
--test src/resources/extensions/sf/tests/<name>.test.ts
npx vitest run src/resources/extensions/sf/tests/<name>.test.ts --config vitest.config.ts
# Run the full SF extension test suite:
npm test
```
npm run test:unit
If the machine feels slow, check for stray workers:
```bash
ps aux | grep "heap-prof-interval" | grep -v grep
# Kill them: ... | awk '{print $2}' | xargs kill -9
# Run only tests affected by recent changes (fast feedback loop):
npx vitest run --changed --config vitest.config.ts
# Watch mode for active development:
npx vitest --config vitest.config.ts
```
**Do not use Python for one-off JSON/hash work.** The resource fingerprint in

View file

@ -1,4 +1,4 @@
import { describe, it, afterEach, before, after } from 'vitest';
import { describe, it, afterEach, beforeAll, afterAll } from 'vitest';
import assert from 'node:assert/strict';
import { mkdtempSync, writeFileSync, readFileSync, rmSync, existsSync, mkdirSync } from 'node:fs';
import { join } from 'node:path';
@ -68,13 +68,13 @@ describe('resolveConfigPath', () => {
describe('loadConfig', () => {
// Save and clear DISCORD_BOT_TOKEN for this suite — env override interferes with file-token assertions
let savedToken: string | undefined;
before(() => {
beforeAll(() => {
savedToken = process.env['DISCORD_BOT_TOKEN'];
delete process.env['DISCORD_BOT_TOKEN'];
});
afterEach(() => {}); // cleanup dirs handled by top-level afterEach
// Restore after all tests in this suite
after(() => {
afterAll(() => {
if (savedToken !== undefined) process.env['DISCORD_BOT_TOKEN'] = savedToken;
});
@ -143,11 +143,11 @@ log:
describe('validateConfig', () => {
// Save and clear DISCORD_BOT_TOKEN for tests that don't expect it
let savedToken: string | undefined;
before(() => {
beforeAll(() => {
savedToken = process.env['DISCORD_BOT_TOKEN'];
delete process.env['DISCORD_BOT_TOKEN'];
});
after(() => {
afterAll(() => {
if (savedToken !== undefined) process.env['DISCORD_BOT_TOKEN'] = savedToken;
});

View file

@ -1,7 +1,7 @@
// SF MCP Server — knowledge graph reader tests
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
import { describe, it, before, after, beforeEach, afterEach } from 'vitest';
import { describe, it, beforeAll, afterAll, beforeEach, afterEach } from 'vitest';
import assert from 'node:assert/strict';
import { mkdirSync, writeFileSync, rmSync, existsSync, readFileSync } from 'node:fs';
import { join } from 'node:path';
@ -148,12 +148,12 @@ missing_artifacts: []
describe('buildGraph', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
makeProjectWithArtifacts(projectDir);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('returns nodeCount > 0 for a project with artifacts', async () => {
const graph = await buildGraph(projectDir);
@ -348,13 +348,13 @@ describe('writeGraph', () => {
let projectDir: string;
let graph: KnowledgeGraph;
before(async () => {
beforeAll(async () => {
projectDir = tmpProject();
makeProjectWithArtifacts(projectDir);
graph = await buildGraph(projectDir);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('creates graph.json in .sf/graphs/ after writeGraph()', async () => {
const sfRoot = join(projectDir, '.sf');
@ -453,7 +453,7 @@ describe('graphStatus', () => {
describe('graphQuery', () => {
let projectDir: string;
before(async () => {
beforeAll(async () => {
projectDir = tmpProject();
makeProjectWithArtifacts(projectDir);
const sfRoot = join(projectDir, '.sf');
@ -461,7 +461,7 @@ describe('graphQuery', () => {
await writeGraph(sfRoot, graph);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('returns matching nodes for a known term', async () => {
const result = await graphQuery(projectDir, 'auth');

View file

@ -1,7 +1,7 @@
// SF MCP Server — reader tests
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
import { describe, it, before, after } from 'vitest';
import { describe, it, beforeAll, afterAll } from 'vitest';
import assert from 'node:assert/strict';
import { mkdirSync, writeFileSync, rmSync } from 'node:fs';
import { join } from 'node:path';
@ -38,7 +38,7 @@ function writeFixture(base: string, relPath: string, content: string): void {
describe('readProgress', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
writeFixture(projectDir, '.sf/STATE.md', `# SF State
@ -76,7 +76,7 @@ Execute T02 in S01 — implement token refresh.
mkdirSync(join(projectDir, '.sf/milestones/M003'), { recursive: true });
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('parses active milestone from STATE.md', () => {
const result = readProgress(projectDir);
@ -142,7 +142,7 @@ Execute T02 in S01 — implement token refresh.
describe('readRoadmap', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
writeFixture(projectDir, '.sf/milestones/M001/M001-CONTEXT.md', '# M001: Core Setup\n');
@ -183,7 +183,7 @@ Build the foundation for the project.
writeFixture(projectDir, '.sf/milestones/M001/slices/S02/tasks/T02-PLAN.md', '# T02');
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('returns milestone structure', () => {
const result = readRoadmap(projectDir);
@ -233,7 +233,7 @@ Build the foundation for the project.
describe('readHistory', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
writeFixture(projectDir, '.sf/metrics.json', JSON.stringify({
version: 1,
@ -265,7 +265,7 @@ describe('readHistory', () => {
}));
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('returns all entries sorted by most recent', () => {
const result = readHistory(projectDir);
@ -303,7 +303,7 @@ describe('readHistory', () => {
describe('readCaptures', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
writeFixture(projectDir, '.sf/CAPTURES.md', `# Captures
@ -336,7 +336,7 @@ describe('readCaptures', () => {
`);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('reads all captures', () => {
const result = readCaptures(projectDir, 'all');
@ -379,7 +379,7 @@ describe('readCaptures', () => {
describe('readKnowledge', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
writeFixture(projectDir, '.sf/KNOWLEDGE.md', `# Project Knowledge
@ -404,7 +404,7 @@ describe('readKnowledge', () => {
`);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('reads all knowledge entries', () => {
const result = readKnowledge(projectDir);
@ -443,7 +443,7 @@ describe('readKnowledge', () => {
describe('runDoctorLite', () => {
let projectDir: string;
before(() => {
beforeAll(() => {
projectDir = tmpProject();
// M001: complete milestone (has summary)
@ -467,7 +467,7 @@ describe('runDoctorLite', () => {
mkdirSync(join(projectDir, '.sf/milestones/M003'), { recursive: true });
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
afterAll(() => rmSync(projectDir, { recursive: true, force: true }));
it('detects all-slices-done-missing-summary', () => {
const result = runDoctorLite(projectDir);

View file

@ -35,7 +35,7 @@ if (!native) {
describe("native fd: fuzzyFind()", () => {
test("finds files matching a query", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.rs"), "fn main() {}");
fs.writeFileSync(path.join(tmpDir, "lib.rs"), "pub mod lib;");
@ -53,7 +53,7 @@ describe("native fd: fuzzyFind()", () => {
test("returns empty results for non-matching query", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "hello.txt"), "hello");
@ -68,7 +68,7 @@ describe("native fd: fuzzyFind()", () => {
test("respects maxResults limit", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "content");
@ -86,7 +86,7 @@ describe("native fd: fuzzyFind()", () => {
test("directories have trailing slash and bonus score", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "models"));
fs.writeFileSync(path.join(tmpDir, "models.ts"), "export {}");
@ -104,7 +104,7 @@ describe("native fd: fuzzyFind()", () => {
test("empty query returns all entries", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "a.txt"), "a");
fs.writeFileSync(path.join(tmpDir, "b.txt"), "b");
@ -124,7 +124,7 @@ describe("native fd: fuzzyFind()", () => {
test("fuzzy subsequence matching works", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "MyComponentFile.tsx"), "export {}");
fs.writeFileSync(path.join(tmpDir, "other.txt"), "other");
@ -144,7 +144,7 @@ describe("native fd: fuzzyFind()", () => {
process.env.FS_SCAN_CACHE_TTL_MS = "10000";
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => {
t.afterAll(() => {
native.invalidateFsScanCache(tmpDir);
fs.rmSync(tmpDir, { recursive: true, force: true });
if (previousTtl === undefined) {
@ -176,7 +176,7 @@ describe("native fd: fuzzyFind()", () => {
test("results are sorted by score descending", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.ts"), "");
fs.writeFileSync(path.join(tmpDir, "my_main.ts"), "");

View file

@ -45,7 +45,7 @@ if (!native) {
describe("native glob: glob()", () => {
test("finds files matching a pattern", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.ts"), "const a = 1;");
fs.writeFileSync(path.join(tmpDir, "file2.ts"), "const b = 2;");
@ -61,7 +61,7 @@ describe("native glob: glob()", () => {
test("recursive matching into subdirectories", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "src"));
fs.mkdirSync(path.join(tmpDir, "src", "nested"));
@ -80,7 +80,7 @@ describe("native glob: glob()", () => {
test("respects maxResults limit", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "");
@ -98,7 +98,7 @@ describe("native glob: glob()", () => {
test("filters by file type (directories only)", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "dir1"));
fs.mkdirSync(path.join(tmpDir, "dir2"));
@ -118,7 +118,7 @@ describe("native glob: glob()", () => {
test("respects .gitignore", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
// Init a git repo so .gitignore is respected
fs.mkdirSync(path.join(tmpDir, ".git"));
@ -138,7 +138,7 @@ describe("native glob: glob()", () => {
test("includes gitignored files when gitignore=false", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, ".git"));
fs.writeFileSync(path.join(tmpDir, ".gitignore"), "ignored.txt\n");
@ -156,7 +156,7 @@ describe("native glob: glob()", () => {
test("skips node_modules by default", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "node_modules"));
fs.writeFileSync(path.join(tmpDir, "node_modules", "dep.js"), "");
@ -174,7 +174,7 @@ describe("native glob: glob()", () => {
test("sortByMtime returns most recent first", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "old.txt"), "old");
// Ensure different mtime
@ -210,7 +210,7 @@ describe("native glob: glob()", () => {
test("returns mtime for each entry", async (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "test.txt"), "content");

View file

@ -95,7 +95,7 @@ describe("native grep: grep()", () => {
test("returns a promise", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
@ -112,7 +112,7 @@ describe("native grep: grep()", () => {
test("searches files on disk", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\nfoo bar\n");
fs.writeFileSync(path.join(tmpDir, "file2.txt"), "hello rust\nbaz qux\n");
@ -134,7 +134,7 @@ describe("native grep: grep()", () => {
test("respects glob filter", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "code.ts"), "hello typescript\n");
fs.writeFileSync(path.join(tmpDir, "code.js"), "hello javascript\n");
@ -152,7 +152,7 @@ describe("native grep: grep()", () => {
test("respects maxCount", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
t.afterAll(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "match_me\n");

View file

@ -38,7 +38,7 @@ const stubModel = { baseUrl: "https://api.anthropic.com" } as Parameters<typeof
test("resolveAnthropicBaseUrl returns model.baseUrl when ANTHROPIC_BASE_URL is unset (#4140)", (t) => {
const saved = process.env.ANTHROPIC_BASE_URL;
t.after(() => {
t.afterAll(() => {
if (saved === undefined) delete process.env.ANTHROPIC_BASE_URL;
else process.env.ANTHROPIC_BASE_URL = saved;
});
@ -49,7 +49,7 @@ test("resolveAnthropicBaseUrl returns model.baseUrl when ANTHROPIC_BASE_URL is u
test("resolveAnthropicBaseUrl prefers ANTHROPIC_BASE_URL over model.baseUrl (#4140)", (t) => {
const saved = process.env.ANTHROPIC_BASE_URL;
t.after(() => {
t.afterAll(() => {
if (saved === undefined) delete process.env.ANTHROPIC_BASE_URL;
else process.env.ANTHROPIC_BASE_URL = saved;
});
@ -60,7 +60,7 @@ test("resolveAnthropicBaseUrl prefers ANTHROPIC_BASE_URL over model.baseUrl (#41
test("resolveAnthropicBaseUrl ignores whitespace-only ANTHROPIC_BASE_URL (#4140)", (t) => {
const saved = process.env.ANTHROPIC_BASE_URL;
t.after(() => {
t.afterAll(() => {
if (saved === undefined) delete process.env.ANTHROPIC_BASE_URL;
else process.env.ANTHROPIC_BASE_URL = saved;
});

View file

@ -282,7 +282,7 @@ describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () =
// fall-through to env / fallback finds nothing and returns undefined.
const origEnv = process.env.OPENROUTER_API_KEY;
delete process.env.OPENROUTER_API_KEY;
t.after(() => {
t.afterAll(() => {
if (origEnv === undefined) {
delete process.env.OPENROUTER_API_KEY;
} else {
@ -312,7 +312,7 @@ describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () =
// Simulate OPENROUTER_API_KEY being set via env
const origEnv = process.env.OPENROUTER_API_KEY;
t.after(() => {
t.afterAll(() => {
if (origEnv === undefined) {
delete process.env.OPENROUTER_API_KEY;
} else {
@ -339,7 +339,7 @@ describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () =
// and the fallback resolver is reached.
const origEnv = process.env.OPENROUTER_API_KEY;
delete process.env.OPENROUTER_API_KEY;
t.after(() => {
t.afterAll(() => {
if (origEnv === undefined) {
delete process.env.OPENROUTER_API_KEY;
} else {

View file

@ -50,7 +50,7 @@ function makeThrowingExtension(eventType: string, error: Error): Extension {
describe("ExtensionRunner.emitToolCall", () => {
it("catches throwing extension handler and routes to emitError", async (t) => {
const dir = mkdtempSync(join(tmpdir(), "runner-test-"));
t.after(() => {
t.afterAll(() => {
rmSync(dir, { recursive: true, force: true });
});

View file

@ -12,7 +12,7 @@ import {
function tmpDir(prefix: string, t: { after: (fn: () => void) => void }): string {
const dir = mkdtempSync(join(tmpdir(), `pi-lh-${prefix}-`));
t.after(() => rmSync(dir, { recursive: true, force: true }));
t.afterAll(() => rmSync(dir, { recursive: true, force: true }));
return dir;
}

View file

@ -27,7 +27,7 @@ function writePackage(root: string, files: Record<string, string>): void {
function createTestDirs(prefix: string, t: { after: (fn: () => void) => void }) {
const root = mkdtempSync(join(tmpdir(), `pi-lifecycle-${prefix}-`));
t.after(() => rmSync(root, { recursive: true, force: true }));
t.afterAll(() => rmSync(root, { recursive: true, force: true }));
const cwd = join(root, "cwd");
const agentDir = join(root, "agent");
const extensionDir = join(root, `ext-${prefix}`);

View file

@ -47,7 +47,7 @@ describe("resolveConfigValue — command allowlist enforcement", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -77,7 +77,7 @@ describe("resolveConfigValue — command allowlist enforcement", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -137,7 +137,7 @@ describe("resolveConfigValue — shell operator bypass prevention", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -154,7 +154,7 @@ describe("resolveConfigValue — caching", () => {
callCount.n++;
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -172,7 +172,7 @@ describe("resolveConfigValue — caching", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -199,7 +199,7 @@ describe("REGRESSION #666: non-default credential tool blocked with no override"
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -243,7 +243,7 @@ describe("setAllowedCommandPrefixes — user override", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -260,7 +260,7 @@ describe("setAllowedCommandPrefixes — user override", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});
@ -278,7 +278,7 @@ describe("setAllowedCommandPrefixes — user override", () => {
stderrChunks.push(chunk.toString());
return true;
};
t.after(() => {
t.afterAll(() => {
process.stderr.write = originalWrite;
});

View file

@ -62,7 +62,7 @@ describe("edit-diff", () => {
it("computes diffs for preview without native helpers", async (t) => {
const dir = mkdtempSync(join(tmpdir(), "edit-diff-test-"));
t.after(() => {
t.afterAll(() => {
rmSync(dir, { recursive: true, force: true });
});

View file

@ -136,7 +136,7 @@ test("buildSystemPrompt: skillFilter that throws falls back to unfiltered list a
const originalWarn = console.warn;
const warnings: string[] = [];
console.warn = (...args: unknown[]) => { warnings.push(args.join(" ")); };
t.after(() => { console.warn = originalWarn; });
t.afterAll(() => { console.warn = originalWarn; });
let prompt = "";
assert.doesNotThrow(() => {

View file

@ -110,7 +110,7 @@ const buildFormAnalysisScript = extractBuildFormAnalysisScript();
let browser;
let page;
before(async () => {
beforeAll(async () => {
browser = await chromium.launch({ headless: true });
const context = await browser.newContext({
viewport: { width: 1280, height: 800 },
@ -119,7 +119,7 @@ before(async () => {
page = await context.newPage();
});
after(async () => {
afterAll(async () => {
if (browser) await browser.close();
});

View file

@ -19,7 +19,7 @@ import {
} from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { after, before, describe, it, vi, afterEach } from 'vitest';
import { afterAll, beforeAll, describe, it, vi, afterEach } from 'vitest';
import type { ExtensionCommandContext } from "@singularity-forge/pi-coding-agent";
import {
discoverClaudePlugins,
@ -137,13 +137,13 @@ describe("TUI Command Flow Tests", { skip: skipReason }, () => {
let _prefsPath: string;
let prefs: Record<string, unknown>;
before(() => {
beforeAll(() => {
tempDir = mkdtempSync(join(tmpdir(), "sf-tui-test-"));
_prefsPath = join(tempDir, "PREFERENCES.md");
prefs = { version: 1 };
});
after(() => {
afterAll(() => {
fixtures?.cleanup();
if (existsSync(tempDir)) {
rmSync(tempDir, { recursive: true, force: true });

View file

@ -66,7 +66,7 @@ afterEach(() => {
tmpDirs.length = 0;
});
before(() => {
beforeAll(() => {
savedCwd = process.cwd();
});

View file

@ -247,7 +247,7 @@ describe("Resolver routing", () => {
describe("Kill switch (SF_ENGINE_BYPASS)", () => {
const originalBypass = process.env.SF_ENGINE_BYPASS;
after(() => {
afterAll(() => {
// Restore original env var state
if (originalBypass === undefined) {
delete process.env.SF_ENGINE_BYPASS;

View file

@ -138,7 +138,7 @@ describe("doctor", async () => {
);
});
after(() => rmSync(tmpBase, { recursive: true, force: true }));
afterAll(() => rmSync(tmpBase, { recursive: true, force: true }));
// ─── Milestone summary detection: missing summary ──────────────────────
test("doctor detects missing milestone summary", async () => {

View file

@ -61,11 +61,11 @@ describe("Live E2E Tests", { skip: skipReason }, () => {
let importer: PluginImporter;
let discoveryResult: DiscoveryResult;
before(() => {
beforeAll(() => {
importer = new PluginImporter();
});
after(() => {
afterAll(() => {
fixtures?.cleanup();
});
@ -471,7 +471,7 @@ describe("Live E2E Tests", { skip: skipReason }, () => {
it("should execute discover → select → validate → manifest without error", () => {
// This test verifies the full pipeline works end-to-end
// Already have discovery from before()
// Already have discovery from beforeAll()
assert.ok(discoveryResult, "Discovery should have completed");
// Select subset

View file

@ -27,7 +27,7 @@ describe("migrate-external worktree guard (#2970)", () => {
let stateDir: string;
let worktreePath: string;
before(() => {
beforeAll(() => {
base = realpathSync(mkdtempSync(join(tmpdir(), "sf-migrate-wt-")));
stateDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-state-")));
process.env.SF_STATE_DIR = stateDir;
@ -51,7 +51,7 @@ describe("migrate-external worktree guard (#2970)", () => {
writeFileSync(join(worktreeSf, "PREFERENCES.md"), "# prefs\n", "utf-8");
});
after(() => {
afterAll(() => {
delete process.env.SF_STATE_DIR;
// Remove worktree before cleaning up
try {

View file

@ -47,7 +47,7 @@ function makeMessageEndLine(cost: number, role = "assistant"): string {
// ─── Tests ────────────────────────────────────────────────────────────────
describe("parallel-worker-monitoring", () => {
after(() => {
afterAll(() => {
resetOrchestrator();
});

View file

@ -64,13 +64,13 @@ describe("project-relocation-recovery (#2750)", () => {
let stateDir: string;
let savedStateDir: string | undefined;
before(() => {
beforeAll(() => {
savedStateDir = process.env.SF_STATE_DIR;
stateDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-state-")));
process.env.SF_STATE_DIR = stateDir;
});
after(() => {
afterAll(() => {
if (savedStateDir !== undefined) {
process.env.SF_STATE_DIR = savedStateDir;
} else {

View file

@ -50,7 +50,7 @@ describe("repo-identity-worktree", () => {
let worktreePath: string;
let expectedExternalState: string;
before(() => {
beforeAll(() => {
base = realpathSync(mkdtempSync(join(tmpdir(), "sf-repo-identity-")));
stateDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-state-")));
process.env.SF_STATE_DIR = stateDir;
@ -69,7 +69,7 @@ describe("repo-identity-worktree", () => {
expectedExternalState = externalSfRoot(base);
});
after(() => {
afterAll(() => {
delete process.env.SF_PROJECT_ID;
delete process.env.SF_STATE_DIR;
rmSync(base, { recursive: true, force: true });

View file

@ -131,7 +131,7 @@ describe("requirements", () => {
);
});
after(() => {
afterAll(() => {
rmSync(base, { recursive: true, force: true });
});
});

View file

@ -58,7 +58,7 @@ afterEach(() => {
_resetSelfDetectionCache();
});
before(() => {
beforeAll(() => {
// Ensure cleanup even if tests are aborted
process.on("exit", () => {
for (const d of tmpDirs) {

View file

@ -29,7 +29,7 @@ import { _resetServiceCache } from "../worktree.ts";
let originalHome: string | undefined;
let fakeHome: string;
test.before(() => {
test.beforeAll(() => {
originalHome = process.env.HOME;
fakeHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-fake-home-")));
process.env.HOME = fakeHome;
@ -37,7 +37,7 @@ test.before(() => {
_resetServiceCache();
});
test.after(() => {
test.afterAll(() => {
process.env.HOME = originalHome;
_clearSfRootCache();
_resetServiceCache();

View file

@ -34,7 +34,7 @@ import { _resetServiceCache } from "../worktree.ts";
let originalHome: string | undefined;
let fakeHome: string;
test.before(() => {
test.beforeAll(() => {
originalHome = process.env.HOME;
fakeHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-fake-home-")));
process.env.HOME = fakeHome;
@ -42,7 +42,7 @@ test.before(() => {
_resetServiceCache();
});
test.after(() => {
test.afterAll(() => {
process.env.HOME = originalHome;
_clearSfRootCache();
_resetServiceCache();

View file

@ -38,7 +38,7 @@ function initRepo(dir: string): void {
describe("worktree-bugfix", () => {
const dirs: string[] = [];
after(() => {
afterAll(() => {
for (const d of dirs) rmSync(d, { recursive: true, force: true });
});

View file

@ -63,7 +63,7 @@ function createTempRepo(): string {
describe("worktree-teardown-safety", () => {
const dirs: string[] = [];
after(() => {
afterAll(() => {
for (const d of dirs) rmSync(d, { recursive: true, force: true });
report();
});

View file

@ -102,7 +102,7 @@ mkdirSync(join(plainProject, "src"));
// Teardown
// ---------------------------------------------------------------------------
after(() => {
afterAll(() => {
rmSync(tempRoot, { recursive: true, force: true });
rmSync(monorepoPnpm, { recursive: true, force: true });
rmSync(monorepoLerna, { recursive: true, force: true });

View file

@ -117,7 +117,7 @@ const prefsDir = join(tempRoot, "prefs");
mkdirSync(prefsDir);
const prefsPath = join(prefsDir, "web-preferences.json");
after(() => {
afterAll(() => {
rmSync(tempRoot, { recursive: true, force: true });
});