refactor(test): replace try/finally with t.after() in src/tests (o-z) (#2392)
parent 99af6b0315
commit 30775f4dcc
29 changed files with 3360 additions and 3498 deletions
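The diff below applies one mechanical change per test: the `async () =>` callback gains a `t` context parameter, the `try { … } finally { rmSync(…) }` wrapper is dropped, and cleanup is registered with `t.after()` immediately after the resource is created, so node:test runs it even when an assertion throws. A minimal sketch of the before/after shape, using placeholder names rather than the repo's real helpers:

```typescript
// Illustrative only — "example-" prefix and the assertions are stand-ins,
// not the real runGsd/createTempGitRepo helpers from this repo.
import { test } from "node:test";
import assert from "node:assert";
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

// Before: cleanup via try/finally around the whole test body.
test("example (try/finally)", async () => {
  const tmpDir = mkdtempSync(join(tmpdir(), "example-"));
  try {
    assert.ok(tmpDir.length > 0);
  } finally {
    rmSync(tmpDir, { recursive: true, force: true });
  }
});

// After: register cleanup with t.after() right where the resource is created;
// the hook runs even if a later assertion fails, so the wrapper and one level
// of indentation disappear.
test("example (t.after)", async (t) => {
  const tmpDir = mkdtempSync(join(tmpdir(), "example-"));
  t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });

  assert.ok(tmpDir.length > 0);
});
```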
@@ -406,156 +406,144 @@ test("gsd -h is equivalent to --help", async () => {
// 13. gsd headless without .gsd/ directory exits 1 with clean error
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("gsd headless without .gsd/ directory exits 1 with clean error", async () => {
|
||||
test("gsd headless without .gsd/ directory exits 1 with clean error", async (t) => {
|
||||
const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-no-gsd-"));
|
||||
|
||||
try {
|
||||
const result = await runGsd(["headless"], 10_000, {}, tmpDir);
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
const result = await runGsd(["headless"], 10_000, {}, tmpDir);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes(".gsd/") || combined.includes("No .gsd"),
|
||||
`expected .gsd/ missing error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes(".gsd/") || combined.includes("No .gsd"),
|
||||
`expected .gsd/ missing error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 14. gsd headless new-milestone without --context exits 1
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("gsd headless new-milestone without --context exits 1", async () => {
|
||||
test("gsd headless new-milestone without --context exits 1", async (t) => {
|
||||
const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-no-ctx-"));
|
||||
|
||||
try {
|
||||
const result = await runGsd(["headless", "new-milestone"], 10_000, {}, tmpDir);
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
const result = await runGsd(["headless", "new-milestone"], 10_000, {}, tmpDir);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("context") || combined.includes("--context"),
|
||||
`expected context-required error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("context") || combined.includes("--context"),
|
||||
`expected context-required error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 15. gsd headless --timeout with invalid value exits 1
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("gsd headless --timeout with invalid value exits 1", async () => {
|
||||
test("gsd headless --timeout with invalid value exits 1", async (t) => {
|
||||
const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-bad-timeout-"));
|
||||
|
||||
try {
|
||||
const result = await runGsd(
|
||||
["headless", "--timeout", "not-a-number", "auto"],
|
||||
10_000,
|
||||
{},
|
||||
tmpDir,
|
||||
);
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
const result = await runGsd(
|
||||
["headless", "--timeout", "not-a-number", "auto"],
|
||||
10_000,
|
||||
{},
|
||||
tmpDir,
|
||||
);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("timeout") || combined.includes("positive integer"),
|
||||
`expected timeout validation error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("timeout") || combined.includes("positive integer"),
|
||||
`expected timeout validation error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// 16. gsd headless --timeout with negative value exits 1
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("gsd headless --timeout with negative value exits 1", async () => {
|
||||
test("gsd headless --timeout with negative value exits 1", async (t) => {
|
||||
const tmpDir = mkdtempSync(join(tmpdir(), "gsd-e2e-neg-timeout-"));
|
||||
|
||||
try {
|
||||
const result = await runGsd(
|
||||
["headless", "--timeout", "-5000", "auto"],
|
||||
10_000,
|
||||
{},
|
||||
tmpDir,
|
||||
);
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
const result = await runGsd(
|
||||
["headless", "--timeout", "-5000", "auto"],
|
||||
10_000,
|
||||
{},
|
||||
tmpDir,
|
||||
);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("timeout") || combined.includes("positive integer"),
|
||||
`expected timeout validation error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assert.ok(
|
||||
combined.includes("timeout") || combined.includes("positive integer"),
|
||||
`expected timeout validation error, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
|
||||
assertNoCrashMarkers(combined);
|
||||
});
|
||||
|
||||
test("gsd headless query returns JSON from the built CLI", async () => {
|
||||
test("gsd headless query returns JSON from the built CLI", async (t) => {
|
||||
const tmpDir = createTempGitRepo("gsd-e2e-query-");
|
||||
|
||||
try {
|
||||
mkdirSync(join(tmpDir, ".gsd", "milestones"), { recursive: true });
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
// Cold packaged startup in a fresh temp repo is now regularly >10s because
|
||||
// the built CLI loads bundled TS resources through jiti before answering.
|
||||
// This command is still healthy; it just needs a realistic timeout budget.
|
||||
const result = await runGsd(["headless", "query"], 30_000, {}, tmpDir);
|
||||
mkdirSync(join(tmpDir, ".gsd", "milestones"), { recursive: true });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`);
|
||||
// Cold packaged startup in a fresh temp repo is now regularly >10s because
|
||||
// the built CLI loads bundled TS resources through jiti before answering.
|
||||
// This command is still healthy; it just needs a realistic timeout budget.
|
||||
const result = await runGsd(["headless", "query"], 30_000, {}, tmpDir);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assertNoCrashMarkers(combined);
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`);
|
||||
|
||||
const snapshot = JSON.parse(result.stdout);
|
||||
assert.equal(typeof snapshot.state?.phase, "string", "query output should include state.phase");
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assertNoCrashMarkers(combined);
|
||||
|
||||
const snapshot = JSON.parse(result.stdout);
|
||||
assert.equal(typeof snapshot.state?.phase, "string", "query output should include state.phase");
|
||||
});
|
||||
|
||||
test("gsd worktree list loads the built worktree CLI without module errors", async () => {
|
||||
test("gsd worktree list loads the built worktree CLI without module errors", async (t) => {
|
||||
const tmpDir = createTempGitRepo("gsd-e2e-worktree-");
|
||||
|
||||
try {
|
||||
// Cold packaged startup in a fresh temp repo is now regularly >10s because
|
||||
// the built CLI loads bundled TS resources through jiti before listing.
|
||||
const result = await runGsd(["worktree", "list"], 30_000, {}, tmpDir);
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`);
|
||||
// Cold packaged startup in a fresh temp repo is now regularly >10s because
|
||||
// the built CLI loads bundled TS resources through jiti before listing.
|
||||
const result = await runGsd(["worktree", "list"], 30_000, {}, tmpDir);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assertNoCrashMarkers(combined);
|
||||
assert.ok(
|
||||
combined.includes("No worktrees") || combined.includes("Worktrees"),
|
||||
`expected worktree CLI output, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
assert.ok(!result.timedOut, "process should not hang");
|
||||
assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`);
|
||||
|
||||
const combined = stripAnsi(result.stdout + result.stderr);
|
||||
assertNoCrashMarkers(combined);
|
||||
assert.ok(
|
||||
combined.includes("No worktrees") || combined.includes("Worktrees"),
|
||||
`expected worktree CLI output, got:\n${combined.slice(0, 500)}`,
|
||||
);
|
||||
});
|
||||
|
||||
// ===========================================================================
@@ -97,79 +97,79 @@ function listTarEntries(tarballPath: string): Promise<string[]> {
// 1. npm pack produces valid tarball with correct file layout
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test("npm pack produces tarball with required files", async () => {
|
||||
test("npm pack produces tarball with required files", async (t) => {
|
||||
const sandbox = createNpmSandbox("gsd-pack-test-");
|
||||
const tarballPath = packTarball(sandbox);
|
||||
|
||||
assert.ok(existsSync(tarballPath), "tarball created");
|
||||
|
||||
try {
|
||||
const files = await listTarEntries(tarballPath);
|
||||
|
||||
// Critical files must be present
|
||||
assert.ok(files.some(f => f.includes("dist/loader.js")), "tarball contains dist/loader.js");
|
||||
assert.ok(files.some(f => f.includes("dist/cli.js")), "tarball contains dist/cli.js");
|
||||
assert.ok(files.some(f => f.includes("dist/app-paths.js")), "tarball contains dist/app-paths.js");
|
||||
assert.ok(files.some(f => f.includes("dist/wizard.js")), "tarball contains dist/wizard.js");
|
||||
assert.ok(files.some(f => f.includes("dist/resource-loader.js")), "tarball contains dist/resource-loader.js");
|
||||
assert.ok(files.some(f => f.includes("pkg/package.json")), "tarball contains pkg/package.json");
|
||||
assert.ok(files.some(f => f.includes("src/resources/extensions/gsd/index.ts")), "tarball contains bundled gsd extension");
|
||||
assert.ok(files.some(f => f.includes("scripts/postinstall.js")), "tarball contains postinstall script");
|
||||
|
||||
// pkg/package.json must have piConfig
|
||||
const pkgJson = readFileSync(join(projectRoot, "pkg", "package.json"), "utf-8");
|
||||
const pkg = JSON.parse(pkgJson);
|
||||
assert.equal(pkg.piConfig?.name, "gsd", "pkg/package.json piConfig.name is gsd");
|
||||
assert.equal(pkg.piConfig?.configDir, ".gsd", "pkg/package.json piConfig.configDir is .gsd");
|
||||
} finally {
|
||||
t.after(() => {
|
||||
rmSync(tarballPath, { force: true });
|
||||
rmSync(sandbox.rootDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
const files = await listTarEntries(tarballPath);
|
||||
|
||||
// Critical files must be present
|
||||
assert.ok(files.some(f => f.includes("dist/loader.js")), "tarball contains dist/loader.js");
|
||||
assert.ok(files.some(f => f.includes("dist/cli.js")), "tarball contains dist/cli.js");
|
||||
assert.ok(files.some(f => f.includes("dist/app-paths.js")), "tarball contains dist/app-paths.js");
|
||||
assert.ok(files.some(f => f.includes("dist/wizard.js")), "tarball contains dist/wizard.js");
|
||||
assert.ok(files.some(f => f.includes("dist/resource-loader.js")), "tarball contains dist/resource-loader.js");
|
||||
assert.ok(files.some(f => f.includes("pkg/package.json")), "tarball contains pkg/package.json");
|
||||
assert.ok(files.some(f => f.includes("src/resources/extensions/gsd/index.ts")), "tarball contains bundled gsd extension");
|
||||
assert.ok(files.some(f => f.includes("scripts/postinstall.js")), "tarball contains postinstall script");
|
||||
|
||||
// pkg/package.json must have piConfig
|
||||
const pkgJson = readFileSync(join(projectRoot, "pkg", "package.json"), "utf-8");
|
||||
const pkg = JSON.parse(pkgJson);
|
||||
assert.equal(pkg.piConfig?.name, "gsd", "pkg/package.json piConfig.name is gsd");
|
||||
assert.equal(pkg.piConfig?.configDir, ".gsd", "pkg/package.json piConfig.configDir is .gsd");
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 2. npm pack → install → gsd binary resolves
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test("tarball installs and gsd binary resolves", async () => {
|
||||
test("tarball installs and gsd binary resolves", async (t) => {
|
||||
const sandbox = createNpmSandbox("gsd-install-test-");
|
||||
const tarballPath = packTarball(sandbox);
|
||||
|
||||
try {
|
||||
// Install from tarball into a temp prefix
|
||||
execFileSync("npm", ["install", "--prefix", sandbox.installPrefix, tarballPath, "--no-save"], {
|
||||
env: sandbox.env,
|
||||
stdio: ["ignore", "ignore", "pipe"],
|
||||
});
|
||||
|
||||
// Verify the gsd bin exists in the installed package
|
||||
const binName = process.platform === "win32" ? "gsd.cmd" : "gsd";
|
||||
const installedBin = join(sandbox.installPrefix, "node_modules", ".bin", binName);
|
||||
assert.ok(existsSync(installedBin), `gsd binary exists in node_modules/.bin/ (${binName})`);
|
||||
|
||||
// Verify loader.js is executable (has shebang)
|
||||
const installedLoader = join(sandbox.installPrefix, "node_modules", "gsd-pi", "dist", "loader.js");
|
||||
const loaderContent = readFileSync(installedLoader, "utf-8");
|
||||
if (process.platform !== "win32") {
|
||||
assert.ok(loaderContent.startsWith("#!/usr/bin/env node"), "loader.js has node shebang");
|
||||
}
|
||||
|
||||
// Verify bundled resources are present
|
||||
const installedGsdExt = join(
|
||||
sandbox.installPrefix,
|
||||
"node_modules",
|
||||
"gsd-pi",
|
||||
"src",
|
||||
"resources",
|
||||
"extensions",
|
||||
"gsd",
|
||||
"index.ts",
|
||||
);
|
||||
assert.ok(existsSync(installedGsdExt), "bundled gsd extension present in installed package");
|
||||
} finally {
|
||||
t.after(() => {
|
||||
rmSync(tarballPath, { force: true });
|
||||
rmSync(sandbox.rootDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
// Install from tarball into a temp prefix
|
||||
execFileSync("npm", ["install", "--prefix", sandbox.installPrefix, tarballPath, "--no-save"], {
|
||||
env: sandbox.env,
|
||||
stdio: ["ignore", "ignore", "pipe"],
|
||||
});
|
||||
|
||||
// Verify the gsd bin exists in the installed package
|
||||
const binName = process.platform === "win32" ? "gsd.cmd" : "gsd";
|
||||
const installedBin = join(sandbox.installPrefix, "node_modules", ".bin", binName);
|
||||
assert.ok(existsSync(installedBin), `gsd binary exists in node_modules/.bin/ (${binName})`);
|
||||
|
||||
// Verify loader.js is executable (has shebang)
|
||||
const installedLoader = join(sandbox.installPrefix, "node_modules", "gsd-pi", "dist", "loader.js");
|
||||
const loaderContent = readFileSync(installedLoader, "utf-8");
|
||||
if (process.platform !== "win32") {
|
||||
assert.ok(loaderContent.startsWith("#!/usr/bin/env node"), "loader.js has node shebang");
|
||||
}
|
||||
|
||||
// Verify bundled resources are present
|
||||
const installedGsdExt = join(
|
||||
sandbox.installPrefix,
|
||||
"node_modules",
|
||||
"gsd-pi",
|
||||
"src",
|
||||
"resources",
|
||||
"extensions",
|
||||
"gsd",
|
||||
"index.ts",
|
||||
);
|
||||
assert.ok(existsSync(installedGsdExt), "bundled gsd extension present in installed package");
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
@@ -230,7 +230,7 @@ test("gsd launches and loads extensions without errors", async () => {
);
|
||||
});
|
||||
|
||||
test("gsd exits early with a clear message when synced resources are newer than the binary", async () => {
|
||||
test("gsd exits early with a clear message when synced resources are newer than the binary", async (t) => {
|
||||
const fakeHome = mkdtempSync(join(tmpdir(), "gsd-version-skew-"));
|
||||
const fakeAgentDir = join(fakeHome, ".gsd", "agent");
|
||||
mkdirSync(fakeAgentDir, { recursive: true });
@@ -239,38 +239,36 @@ test("gsd exits early with a clear message when synced resources are newer than
JSON.stringify({ gsdVersion: "999.0.0" }),
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await new Promise<{ code: number | null; stderr: string }>((resolve) => {
|
||||
let stderr = "";
|
||||
const child = spawn("node", ["dist/loader.js"], {
|
||||
cwd: projectRoot,
|
||||
env: {
|
||||
...process.env,
|
||||
HOME: fakeHome,
|
||||
BRAVE_API_KEY: "test",
|
||||
BRAVE_ANSWERS_KEY: "test",
|
||||
CONTEXT7_API_KEY: "test",
|
||||
JINA_API_KEY: "test",
|
||||
TAVILY_API_KEY: "test",
|
||||
},
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
});
|
||||
t.after(() => { rmSync(fakeHome, { recursive: true, force: true }); });
|
||||
|
||||
child.stderr.on("data", (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
child.stdin.end();
|
||||
child.on("close", (code) => {
|
||||
resolve({ code, stderr });
|
||||
});
|
||||
const result = await new Promise<{ code: number | null; stderr: string }>((resolve) => {
|
||||
let stderr = "";
|
||||
const child = spawn("node", ["dist/loader.js"], {
|
||||
cwd: projectRoot,
|
||||
env: {
|
||||
...process.env,
|
||||
HOME: fakeHome,
|
||||
BRAVE_API_KEY: "test",
|
||||
BRAVE_ANSWERS_KEY: "test",
|
||||
CONTEXT7_API_KEY: "test",
|
||||
JINA_API_KEY: "test",
|
||||
TAVILY_API_KEY: "test",
|
||||
},
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
});
|
||||
|
||||
assert.equal(result.code, 1, "startup exits with code 1 on version skew");
|
||||
assert.match(result.stderr, /Version mismatch detected/, "prints a friendly skew header");
|
||||
assert.match(result.stderr, /npm install -g gsd-pi@latest|gsd update/, "prints upgrade guidance");
|
||||
assert.doesNotMatch(result.stderr, /\[gsd\] Extension load error/, "fails before extension loading");
|
||||
} finally {
|
||||
rmSync(fakeHome, { recursive: true, force: true });
|
||||
}
|
||||
child.stderr.on("data", (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
child.stdin.end();
|
||||
child.on("close", (code) => {
|
||||
resolve({ code, stderr });
|
||||
});
|
||||
});
|
||||
|
||||
assert.equal(result.code, 1, "startup exits with code 1 on version skew");
|
||||
assert.match(result.stderr, /Version mismatch detected/, "prints a friendly skew header");
|
||||
assert.match(result.stderr, /npm install -g gsd-pi@latest|gsd update/, "prints upgrade guidance");
|
||||
assert.doesNotMatch(result.stderr, /\[gsd\] Extension load error/, "fails before extension loading");
|
||||
});
@@ -223,7 +223,7 @@ async function readSseEvents(response: Response, count: number, perReadTimeoutMs
// Assembled lifecycle test
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("assembled lifecycle: boot → onboard → prompt → streaming text → tool execution → blocking UI request → UI response → turn boundary", async () => {
|
||||
test("assembled lifecycle: boot → onboard → prompt → streaming text → tool execution → blocking UI request → UI response → turn boundary", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-assembled", "Assembled Lifecycle Session");
|
@@ -353,231 +353,231 @@ test("assembled lifecycle: boot → onboard → prompt → streaming text → to
validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }),
|
||||
});
|
||||
|
||||
try {
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 1: Boot — verify bridge ready, onboarding locked
|
||||
// -----------------------------------------------------------------------
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200, "boot endpoint should respond 200");
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.bridge.phase, "ready", "bridge should be ready after boot");
|
||||
assert.equal(bootPayload.onboarding.locked, true, "onboarding should be locked before setup");
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup", "lock reason should be required_setup");
|
||||
assert.equal(spawnCount, 1, "bridge should have spawned once during boot");
|
||||
|
||||
// Verify prompt is blocked while locked
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "should be rejected" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423, "prompt should be locked (423) before onboarding");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 2: Onboard — save API key, unlock workspace
|
||||
// -----------------------------------------------------------------------
|
||||
const onboardResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-assembled-test-key",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(onboardResponse.status, 200, "onboarding save_api_key should succeed");
|
||||
const onboardPayload = (await onboardResponse.json()) as any;
|
||||
assert.equal(onboardPayload.onboarding.locked, false, "onboarding should be unlocked after setup");
|
||||
assert.equal(onboardPayload.onboarding.lockReason, null, "lock reason should be null after setup");
|
||||
assert.equal(onboardPayload.onboarding.bridgeAuthRefresh.phase, "succeeded", "bridge auth refresh should succeed");
|
||||
assert.equal(spawnCount, 2, "bridge should have been restarted (spawned again) during auth refresh");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 3: Subscribe SSE + send prompt
|
||||
// -----------------------------------------------------------------------
|
||||
const sseResponse = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: AbortSignal.timeout(10_000) }),
|
||||
);
|
||||
assert.equal(sseResponse.status, 200, "SSE endpoint should respond 200");
|
||||
assert.equal(
|
||||
sseResponse.headers.get("content-type"),
|
||||
"text/event-stream; charset=utf-8",
|
||||
"SSE should have correct content type",
|
||||
);
|
||||
|
||||
// Start reading SSE events in background (reads until count or timeout)
|
||||
const phase1EventsPromise = readSseEvents(sseResponse, 15, 3_000);
|
||||
|
||||
// Send the prompt — triggers fake child's streaming event sequence
|
||||
const promptResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "deploy the application" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(promptResponse.status, 200, "prompt should succeed after onboarding");
|
||||
const promptPayload = (await promptResponse.json()) as any;
|
||||
assert.equal(promptPayload.success, true, "prompt RPC response should indicate success");
|
||||
assert.equal(promptPayload.command, "prompt", "prompt RPC response should echo command type");
|
||||
|
||||
// Collect Phase 1 SSE events
|
||||
const phase1Events = await phase1EventsPromise;
|
||||
await waitForMicrotasks();
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 4: Verify streaming events arrived via SSE
|
||||
// -----------------------------------------------------------------------
|
||||
const nonStatusEvents = phase1Events.filter((e) => e.type !== "bridge_status");
|
||||
const eventTypes = nonStatusEvents.map((e) => e.type);
|
||||
|
||||
const messageUpdate = nonStatusEvents.find((e) => e.type === "message_update");
|
||||
assert.ok(
|
||||
messageUpdate,
|
||||
`message_update event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(
|
||||
messageUpdate.assistantMessageEvent.type,
|
||||
"text_delta",
|
||||
"message_update should contain a text_delta",
|
||||
);
|
||||
assert.equal(
|
||||
messageUpdate.assistantMessageEvent.delta,
|
||||
"Deploying to production...",
|
||||
"text_delta should carry the expected content",
|
||||
);
|
||||
|
||||
const toolStart = nonStatusEvents.find((e) => e.type === "tool_execution_start");
|
||||
assert.ok(
|
||||
toolStart,
|
||||
`tool_execution_start event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(toolStart.toolCallId, "tc-deploy-1", "tool start should have correct toolCallId");
|
||||
assert.equal(toolStart.toolName, "bash", "tool start should identify the tool name");
|
||||
|
||||
const toolEnd = nonStatusEvents.find((e) => e.type === "tool_execution_end");
|
||||
assert.ok(
|
||||
toolEnd,
|
||||
`tool_execution_end event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(toolEnd.toolCallId, "tc-deploy-1", "tool end should match the tool start");
|
||||
assert.equal(toolEnd.isError, false, "tool execution should not be an error");
|
||||
|
||||
const uiRequest = nonStatusEvents.find((e) => e.type === "extension_ui_request");
|
||||
assert.ok(
|
||||
uiRequest,
|
||||
`extension_ui_request event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(uiRequest.id, "ui-confirm-deploy", "UI request should have the expected id");
|
||||
assert.equal(uiRequest.method, "confirm", "UI request should be a confirm dialog");
|
||||
assert.equal(uiRequest.title, "Confirm deployment", "UI request should have the expected title");
|
||||
assert.equal(
|
||||
uiRequest.message,
|
||||
"Proceed with deploying to production?",
|
||||
"UI request should have the expected message",
|
||||
);
|
||||
|
||||
// Verify correct event ordering: message_update → tool_start → tool_end → ui_request
|
||||
const msgIdx = nonStatusEvents.indexOf(messageUpdate);
|
||||
const toolStartIdx = nonStatusEvents.indexOf(toolStart);
|
||||
const toolEndIdx = nonStatusEvents.indexOf(toolEnd);
|
||||
const uiReqIdx = nonStatusEvents.indexOf(uiRequest);
|
||||
assert.ok(msgIdx < toolStartIdx, "message_update should precede tool_execution_start");
|
||||
assert.ok(toolStartIdx < toolEndIdx, "tool_execution_start should precede tool_execution_end");
|
||||
assert.ok(toolEndIdx < uiReqIdx, "tool_execution_end should precede extension_ui_request");
|
||||
|
||||
// Verify bridge_status events were also delivered (proves SSE fanout is working)
|
||||
const statusEvents = phase1Events.filter((e) => e.type === "bridge_status");
|
||||
assert.ok(statusEvents.length >= 1, "at least one bridge_status event should arrive via SSE");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 5: Respond to UI request — prove the round-trip
|
||||
// -----------------------------------------------------------------------
|
||||
const sseResponse2 = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: AbortSignal.timeout(10_000) }),
|
||||
);
|
||||
|
||||
// Start reading Phase 2 events in background
|
||||
const phase2EventsPromise = readSseEvents(sseResponse2, 10, 3_000);
|
||||
|
||||
// Send the UI response
|
||||
const uiResponseResult = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
type: "extension_ui_response",
|
||||
id: "ui-confirm-deploy",
|
||||
value: true,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(uiResponseResult.status, 202, "extension_ui_response should return 202 (fire-and-forget)");
|
||||
|
||||
// Wait for microtasks to let the stdin write propagate
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Verify the UI response reached the fake child's stdin (round-trip proof)
|
||||
assert.ok(receivedUiResponse, "UI response should have reached the fake child via bridge stdin");
|
||||
assert.equal(receivedUiResponse.id, "ui-confirm-deploy", "UI response id should match the request");
|
||||
assert.equal(receivedUiResponse.value, true, "UI response value should be delivered intact");
|
||||
|
||||
// Collect Phase 2 SSE events (agent_end + turn_end)
|
||||
const phase2Events = await phase2EventsPromise;
|
||||
await waitForMicrotasks();
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 6: Verify turn boundary events
|
||||
// -----------------------------------------------------------------------
|
||||
const phase2NonStatus = phase2Events.filter((e) => e.type !== "bridge_status");
|
||||
const phase2Types = phase2NonStatus.map((e) => e.type);
|
||||
|
||||
const agentEnd = phase2NonStatus.find((e) => e.type === "agent_end");
|
||||
assert.ok(
|
||||
agentEnd,
|
||||
`agent_end event should arrive via SSE after UI response (got types: ${phase2Types.join(", ")})`,
|
||||
);
|
||||
|
||||
const turnEnd = phase2NonStatus.find((e) => e.type === "turn_end");
|
||||
assert.ok(
|
||||
turnEnd,
|
||||
`turn_end event should arrive via SSE after UI response (got types: ${phase2Types.join(", ")})`,
|
||||
);
|
||||
|
||||
// Verify agent_end precedes turn_end
|
||||
const agentEndIdx = phase2NonStatus.indexOf(agentEnd);
|
||||
const turnEndIdx = phase2NonStatus.indexOf(turnEnd);
|
||||
assert.ok(agentEndIdx < turnEndIdx, "agent_end should precede turn_end");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Summary assertion: the complete assembled pipeline is proven
|
||||
// -----------------------------------------------------------------------
|
||||
const allEventTypes = [
|
||||
...nonStatusEvents.map((e) => e.type),
|
||||
...phase2NonStatus.map((e) => e.type),
|
||||
];
|
||||
const requiredTypes = [
|
||||
"message_update",
|
||||
"tool_execution_start",
|
||||
"tool_execution_end",
|
||||
"extension_ui_request",
|
||||
"agent_end",
|
||||
"turn_end",
|
||||
];
|
||||
for (const required of requiredTypes) {
|
||||
assert.ok(
|
||||
allEventTypes.includes(required),
|
||||
`complete pipeline must include ${required} (got: ${allEventTypes.join(", ")})`,
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixture.cleanup();
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 1: Boot — verify bridge ready, onboarding locked
|
||||
// -----------------------------------------------------------------------
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200, "boot endpoint should respond 200");
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.bridge.phase, "ready", "bridge should be ready after boot");
|
||||
assert.equal(bootPayload.onboarding.locked, true, "onboarding should be locked before setup");
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup", "lock reason should be required_setup");
|
||||
assert.equal(spawnCount, 1, "bridge should have spawned once during boot");
|
||||
|
||||
// Verify prompt is blocked while locked
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "should be rejected" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423, "prompt should be locked (423) before onboarding");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 2: Onboard — save API key, unlock workspace
|
||||
// -----------------------------------------------------------------------
|
||||
const onboardResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-assembled-test-key",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(onboardResponse.status, 200, "onboarding save_api_key should succeed");
|
||||
const onboardPayload = (await onboardResponse.json()) as any;
|
||||
assert.equal(onboardPayload.onboarding.locked, false, "onboarding should be unlocked after setup");
|
||||
assert.equal(onboardPayload.onboarding.lockReason, null, "lock reason should be null after setup");
|
||||
assert.equal(onboardPayload.onboarding.bridgeAuthRefresh.phase, "succeeded", "bridge auth refresh should succeed");
|
||||
assert.equal(spawnCount, 2, "bridge should have been restarted (spawned again) during auth refresh");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 3: Subscribe SSE + send prompt
|
||||
// -----------------------------------------------------------------------
|
||||
const sseResponse = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: AbortSignal.timeout(10_000) }),
|
||||
);
|
||||
assert.equal(sseResponse.status, 200, "SSE endpoint should respond 200");
|
||||
assert.equal(
|
||||
sseResponse.headers.get("content-type"),
|
||||
"text/event-stream; charset=utf-8",
|
||||
"SSE should have correct content type",
|
||||
);
|
||||
|
||||
// Start reading SSE events in background (reads until count or timeout)
|
||||
const phase1EventsPromise = readSseEvents(sseResponse, 15, 3_000);
|
||||
|
||||
// Send the prompt — triggers fake child's streaming event sequence
|
||||
const promptResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "deploy the application" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(promptResponse.status, 200, "prompt should succeed after onboarding");
|
||||
const promptPayload = (await promptResponse.json()) as any;
|
||||
assert.equal(promptPayload.success, true, "prompt RPC response should indicate success");
|
||||
assert.equal(promptPayload.command, "prompt", "prompt RPC response should echo command type");
|
||||
|
||||
// Collect Phase 1 SSE events
|
||||
const phase1Events = await phase1EventsPromise;
|
||||
await waitForMicrotasks();
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 4: Verify streaming events arrived via SSE
|
||||
// -----------------------------------------------------------------------
|
||||
const nonStatusEvents = phase1Events.filter((e) => e.type !== "bridge_status");
|
||||
const eventTypes = nonStatusEvents.map((e) => e.type);
|
||||
|
||||
const messageUpdate = nonStatusEvents.find((e) => e.type === "message_update");
|
||||
assert.ok(
|
||||
messageUpdate,
|
||||
`message_update event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(
|
||||
messageUpdate.assistantMessageEvent.type,
|
||||
"text_delta",
|
||||
"message_update should contain a text_delta",
|
||||
);
|
||||
assert.equal(
|
||||
messageUpdate.assistantMessageEvent.delta,
|
||||
"Deploying to production...",
|
||||
"text_delta should carry the expected content",
|
||||
);
|
||||
|
||||
const toolStart = nonStatusEvents.find((e) => e.type === "tool_execution_start");
|
||||
assert.ok(
|
||||
toolStart,
|
||||
`tool_execution_start event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(toolStart.toolCallId, "tc-deploy-1", "tool start should have correct toolCallId");
|
||||
assert.equal(toolStart.toolName, "bash", "tool start should identify the tool name");
|
||||
|
||||
const toolEnd = nonStatusEvents.find((e) => e.type === "tool_execution_end");
|
||||
assert.ok(
|
||||
toolEnd,
|
||||
`tool_execution_end event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(toolEnd.toolCallId, "tc-deploy-1", "tool end should match the tool start");
|
||||
assert.equal(toolEnd.isError, false, "tool execution should not be an error");
|
||||
|
||||
const uiRequest = nonStatusEvents.find((e) => e.type === "extension_ui_request");
|
||||
assert.ok(
|
||||
uiRequest,
|
||||
`extension_ui_request event should arrive via SSE (got types: ${eventTypes.join(", ")})`,
|
||||
);
|
||||
assert.equal(uiRequest.id, "ui-confirm-deploy", "UI request should have the expected id");
|
||||
assert.equal(uiRequest.method, "confirm", "UI request should be a confirm dialog");
|
||||
assert.equal(uiRequest.title, "Confirm deployment", "UI request should have the expected title");
|
||||
assert.equal(
|
||||
uiRequest.message,
|
||||
"Proceed with deploying to production?",
|
||||
"UI request should have the expected message",
|
||||
);
|
||||
|
||||
// Verify correct event ordering: message_update → tool_start → tool_end → ui_request
|
||||
const msgIdx = nonStatusEvents.indexOf(messageUpdate);
|
||||
const toolStartIdx = nonStatusEvents.indexOf(toolStart);
|
||||
const toolEndIdx = nonStatusEvents.indexOf(toolEnd);
|
||||
const uiReqIdx = nonStatusEvents.indexOf(uiRequest);
|
||||
assert.ok(msgIdx < toolStartIdx, "message_update should precede tool_execution_start");
|
||||
assert.ok(toolStartIdx < toolEndIdx, "tool_execution_start should precede tool_execution_end");
|
||||
assert.ok(toolEndIdx < uiReqIdx, "tool_execution_end should precede extension_ui_request");
|
||||
|
||||
// Verify bridge_status events were also delivered (proves SSE fanout is working)
|
||||
const statusEvents = phase1Events.filter((e) => e.type === "bridge_status");
|
||||
assert.ok(statusEvents.length >= 1, "at least one bridge_status event should arrive via SSE");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 5: Respond to UI request — prove the round-trip
|
||||
// -----------------------------------------------------------------------
|
||||
const sseResponse2 = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: AbortSignal.timeout(10_000) }),
|
||||
);
|
||||
|
||||
// Start reading Phase 2 events in background
|
||||
const phase2EventsPromise = readSseEvents(sseResponse2, 10, 3_000);
|
||||
|
||||
// Send the UI response
|
||||
const uiResponseResult = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
type: "extension_ui_response",
|
||||
id: "ui-confirm-deploy",
|
||||
value: true,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(uiResponseResult.status, 202, "extension_ui_response should return 202 (fire-and-forget)");
|
||||
|
||||
// Wait for microtasks to let the stdin write propagate
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Verify the UI response reached the fake child's stdin (round-trip proof)
|
||||
assert.ok(receivedUiResponse, "UI response should have reached the fake child via bridge stdin");
|
||||
assert.equal(receivedUiResponse.id, "ui-confirm-deploy", "UI response id should match the request");
|
||||
assert.equal(receivedUiResponse.value, true, "UI response value should be delivered intact");
|
||||
|
||||
// Collect Phase 2 SSE events (agent_end + turn_end)
|
||||
const phase2Events = await phase2EventsPromise;
|
||||
await waitForMicrotasks();
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Stage 6: Verify turn boundary events
|
||||
// -----------------------------------------------------------------------
|
||||
const phase2NonStatus = phase2Events.filter((e) => e.type !== "bridge_status");
|
||||
const phase2Types = phase2NonStatus.map((e) => e.type);
|
||||
|
||||
const agentEnd = phase2NonStatus.find((e) => e.type === "agent_end");
|
||||
assert.ok(
|
||||
agentEnd,
|
||||
`agent_end event should arrive via SSE after UI response (got types: ${phase2Types.join(", ")})`,
|
||||
);
|
||||
|
||||
const turnEnd = phase2NonStatus.find((e) => e.type === "turn_end");
|
||||
assert.ok(
|
||||
turnEnd,
|
||||
`turn_end event should arrive via SSE after UI response (got types: ${phase2Types.join(", ")})`,
|
||||
);
|
||||
|
||||
// Verify agent_end precedes turn_end
|
||||
const agentEndIdx = phase2NonStatus.indexOf(agentEnd);
|
||||
const turnEndIdx = phase2NonStatus.indexOf(turnEnd);
|
||||
assert.ok(agentEndIdx < turnEndIdx, "agent_end should precede turn_end");
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Summary assertion: the complete assembled pipeline is proven
|
||||
// -----------------------------------------------------------------------
|
||||
const allEventTypes = [
|
||||
...nonStatusEvents.map((e) => e.type),
|
||||
...phase2NonStatus.map((e) => e.type),
|
||||
];
|
||||
const requiredTypes = [
|
||||
"message_update",
|
||||
"tool_execution_start",
|
||||
"tool_execution_end",
|
||||
"extension_ui_request",
|
||||
"agent_end",
|
||||
"turn_end",
|
||||
];
|
||||
for (const required of requiredTypes) {
|
||||
assert.ok(
|
||||
allEventTypes.includes(required),
|
||||
`complete pipeline must include ${required} (got: ${allEventTypes.join(", ")})`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test("assembled settings controls keep retry visibility and daily-use mutations authoritative", async () => {
|
||||
test("assembled settings controls keep retry visibility and daily-use mutations authoritative", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-settings", "Settings Session");
|
||||
const bridgeCommands: any[] = [];
|
@@ -696,90 +696,90 @@ test("assembled settings controls keep retry visibility and daily-use mutations
} as any),
|
||||
});
|
||||
|
||||
try {
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.bridge.sessionState.autoRetryEnabled, false);
|
||||
assert.equal(bootPayload.bridge.sessionState.retryInProgress, true);
|
||||
assert.equal(bootPayload.bridge.sessionState.retryAttempt, 2);
|
||||
|
||||
const steeringResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_steering_mode", mode: "one-at-a-time" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(steeringResponse.status, 200);
|
||||
const steeringBody = (await steeringResponse.json()) as any;
|
||||
assert.equal(steeringBody.success, true);
|
||||
|
||||
const followUpResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_follow_up_mode", mode: "one-at-a-time" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(followUpResponse.status, 502);
|
||||
const followUpBody = (await followUpResponse.json()) as any;
|
||||
assert.equal(followUpBody.success, false);
|
||||
assert.match(followUpBody.error, /follow-up mode rejected/i);
|
||||
|
||||
const autoCompactionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_auto_compaction", enabled: true }),
|
||||
}),
|
||||
);
|
||||
assert.equal(autoCompactionResponse.status, 200);
|
||||
const autoCompactionBody = (await autoCompactionResponse.json()) as any;
|
||||
assert.equal(autoCompactionBody.success, true);
|
||||
|
||||
const autoRetryResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_auto_retry", enabled: true }),
|
||||
}),
|
||||
);
|
||||
assert.equal(autoRetryResponse.status, 200);
|
||||
const autoRetryBody = (await autoRetryResponse.json()) as any;
|
||||
assert.equal(autoRetryBody.success, true);
|
||||
|
||||
const abortRetryResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "abort_retry" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(abortRetryResponse.status, 200);
|
||||
const abortRetryBody = (await abortRetryResponse.json()) as any;
|
||||
assert.equal(abortRetryBody.success, true);
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
const refreshedBootResponse = await bootRoute.GET();
|
||||
assert.equal(refreshedBootResponse.status, 200);
|
||||
const refreshedBootPayload = (await refreshedBootResponse.json()) as any;
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.steeringMode, "one-at-a-time");
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.followUpMode, "all");
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.autoCompactionEnabled, true);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.autoRetryEnabled, true);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.retryInProgress, false);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.retryAttempt, 0);
|
||||
|
||||
assert.deepEqual(
|
||||
bridgeCommands.filter((entry) => entry.type !== "get_state").map((entry) => entry.type),
|
||||
["set_steering_mode", "set_follow_up_mode", "set_auto_compaction", "set_auto_retry", "abort_retry"],
|
||||
"settings parity must route through the live bridge instead of browser-local toggles",
|
||||
);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.bridge.sessionState.autoRetryEnabled, false);
|
||||
assert.equal(bootPayload.bridge.sessionState.retryInProgress, true);
|
||||
assert.equal(bootPayload.bridge.sessionState.retryAttempt, 2);
|
||||
|
||||
const steeringResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_steering_mode", mode: "one-at-a-time" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(steeringResponse.status, 200);
|
||||
const steeringBody = (await steeringResponse.json()) as any;
|
||||
assert.equal(steeringBody.success, true);
|
||||
|
||||
const followUpResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_follow_up_mode", mode: "one-at-a-time" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(followUpResponse.status, 502);
|
||||
const followUpBody = (await followUpResponse.json()) as any;
|
||||
assert.equal(followUpBody.success, false);
|
||||
assert.match(followUpBody.error, /follow-up mode rejected/i);
|
||||
|
||||
const autoCompactionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_auto_compaction", enabled: true }),
|
||||
}),
|
||||
);
|
||||
assert.equal(autoCompactionResponse.status, 200);
|
||||
const autoCompactionBody = (await autoCompactionResponse.json()) as any;
|
||||
assert.equal(autoCompactionBody.success, true);
|
||||
|
||||
const autoRetryResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "set_auto_retry", enabled: true }),
|
||||
}),
|
||||
);
|
||||
assert.equal(autoRetryResponse.status, 200);
|
||||
const autoRetryBody = (await autoRetryResponse.json()) as any;
|
||||
assert.equal(autoRetryBody.success, true);
|
||||
|
||||
const abortRetryResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "abort_retry" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(abortRetryResponse.status, 200);
|
||||
const abortRetryBody = (await abortRetryResponse.json()) as any;
|
||||
assert.equal(abortRetryBody.success, true);
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
const refreshedBootResponse = await bootRoute.GET();
|
||||
assert.equal(refreshedBootResponse.status, 200);
|
||||
const refreshedBootPayload = (await refreshedBootResponse.json()) as any;
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.steeringMode, "one-at-a-time");
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.followUpMode, "all");
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.autoCompactionEnabled, true);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.autoRetryEnabled, true);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.retryInProgress, false);
|
||||
assert.equal(refreshedBootPayload.bridge.sessionState.retryAttempt, 0);
|
||||
|
||||
assert.deepEqual(
|
||||
bridgeCommands.filter((entry) => entry.type !== "get_state").map((entry) => entry.type),
|
||||
["set_steering_mode", "set_follow_up_mode", "set_auto_compaction", "set_auto_retry", "abort_retry"],
|
||||
"settings parity must route through the live bridge instead of browser-local toggles",
|
||||
);
|
||||
});
|
||||
|
||||
test("assembled recovery route exposes actionable browser diagnostics without raw transcript leakage", async () => {
|
||||
test("assembled recovery route exposes actionable browser diagnostics without raw transcript leakage", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-recovery", "Recovery Session");
@@ -873,27 +873,27 @@ test("assembled recovery route exposes actionable browser diagnostics without ra
}),
|
||||
});
|
||||
|
||||
try {
|
||||
const response = await recoveryRoute.GET();
|
||||
assert.equal(response.status, 200);
|
||||
const payload = (await response.json()) as any;
|
||||
|
||||
assert.equal(payload.status, "ready");
|
||||
assert.equal(payload.bridge.retry.inProgress, true);
|
||||
assert.equal(payload.bridge.retry.attempt, 2);
|
||||
assert.equal(payload.bridge.authRefresh.phase, "failed");
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "refresh_diagnostics"));
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "open_retry_controls"));
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "open_auth_controls"));
|
||||
assert.equal(payload.interruptedRun.detected, true);
|
||||
assert.doesNotMatch(JSON.stringify(payload), /sk-assembled-recovery-secret-0001|sk-assembled-auth-secret-0002/);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const response = await recoveryRoute.GET();
|
||||
assert.equal(response.status, 200);
|
||||
const payload = (await response.json()) as any;
|
||||
|
||||
assert.equal(payload.status, "ready");
|
||||
assert.equal(payload.bridge.retry.inProgress, true);
|
||||
assert.equal(payload.bridge.retry.attempt, 2);
|
||||
assert.equal(payload.bridge.authRefresh.phase, "failed");
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "refresh_diagnostics"));
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "open_retry_controls"));
|
||||
assert.ok(payload.actions.browser.some((action: { id: string }) => action.id === "open_auth_controls"));
|
||||
assert.equal(payload.interruptedRun.detected, true);
|
||||
assert.doesNotMatch(JSON.stringify(payload), /sk-assembled-recovery-secret-0001|sk-assembled-auth-secret-0002/);
|
||||
});
|
||||
|
||||
test("assembled slash-command behavior keeps built-ins safe while preserving GSD prompt commands", async () => {
|
||||
test("assembled slash-command behavior keeps built-ins safe while preserving GSD prompt commands", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-slash", "Slash Session");
|
||||
const bridgeCommands: any[] = [];
@@ -966,77 +966,77 @@ test("assembled slash-command behavior keeps built-ins safe while preserving GSD
} as any),
|
||||
});
|
||||
|
||||
try {
|
||||
async function submitBrowserInput(input: string): Promise<{ outcome: any; status: number | null; body: any; notice: string | null }> {
|
||||
const outcome = dispatchBrowserSlashCommand(input);
|
||||
|
||||
if (outcome.kind === "prompt" || outcome.kind === "rpc") {
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(outcome.command),
|
||||
}),
|
||||
);
|
||||
return {
|
||||
outcome,
|
||||
status: response.status,
|
||||
body: await response.json(),
|
||||
notice: null,
|
||||
};
|
||||
}
|
||||
|
||||
const notice = getBrowserSlashCommandTerminalNotice(outcome)?.message ?? null;
|
||||
return {
|
||||
outcome,
|
||||
status: null,
|
||||
body: null,
|
||||
notice,
|
||||
};
|
||||
}
|
||||
|
||||
const builtInExecution = await submitBrowserInput("/new");
|
||||
assert.equal(builtInExecution.outcome.kind, "rpc");
|
||||
assert.equal(builtInExecution.status, 200);
|
||||
assert.equal(builtInExecution.body.command, "new_session");
|
||||
|
||||
const builtInSurface = await submitBrowserInput("/model");
|
||||
assert.equal(builtInSurface.outcome.kind, "surface");
|
||||
assert.equal(builtInSurface.outcome.surface, "model");
|
||||
assert.equal(builtInSurface.status, null);
|
||||
|
||||
const builtInNameSurface = await submitBrowserInput("/name Ship It");
|
||||
assert.equal(builtInNameSurface.outcome.kind, "surface");
|
||||
assert.equal(builtInNameSurface.outcome.surface, "name");
|
||||
assert.equal(builtInNameSurface.status, null);
|
||||
|
||||
const builtInReject = await submitBrowserInput("/share");
|
||||
assert.equal(builtInReject.outcome.kind, "reject");
|
||||
assert.match(builtInReject.notice ?? "", /blocked instead of falling through to the model/i);
|
||||
assert.equal(builtInReject.status, null);
|
||||
|
||||
// /gsd status is now a browser surface (S02), verify that
|
||||
const gsdSurface = await submitBrowserInput("/gsd status");
|
||||
assert.equal(gsdSurface.outcome.kind, "surface");
|
||||
assert.equal(gsdSurface.outcome.surface, "gsd-status");
|
||||
assert.equal(gsdSurface.status, null);
|
||||
|
||||
// /gsd auto is a passthrough subcommand — reaches the bridge as a prompt
|
||||
const gsdPrompt = await submitBrowserInput("/gsd auto");
|
||||
assert.equal(gsdPrompt.outcome.kind, "prompt");
|
||||
assert.equal(gsdPrompt.status, 200);
|
||||
assert.equal(gsdPrompt.body.command, "prompt");
|
||||
|
||||
const sentTypes = bridgeCommands.map((command) => command.type);
|
||||
assert.deepEqual(
|
||||
sentTypes.filter((type) => type !== "get_state"),
|
||||
["new_session", "prompt"],
|
||||
"only browser-executable slash commands should reach the live bridge; built-in surfaces/rejects must stay out of prompt text",
|
||||
);
|
||||
const promptCommand = bridgeCommands.find((command) => command.type === "prompt");
|
||||
assert.equal(promptCommand?.message, "/gsd auto", "GSD passthrough commands must stay on the extension prompt path");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
});
|
||||
|
||||
async function submitBrowserInput(input: string): Promise<{ outcome: any; status: number | null; body: any; notice: string | null }> {
|
||||
const outcome = dispatchBrowserSlashCommand(input);
|
||||
|
||||
if (outcome.kind === "prompt" || outcome.kind === "rpc") {
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(outcome.command),
|
||||
}),
|
||||
);
|
||||
return {
|
||||
outcome,
|
||||
status: response.status,
|
||||
body: await response.json(),
|
||||
notice: null,
|
||||
};
|
||||
}
|
||||
|
||||
const notice = getBrowserSlashCommandTerminalNotice(outcome)?.message ?? null;
|
||||
return {
|
||||
outcome,
|
||||
status: null,
|
||||
body: null,
|
||||
notice,
|
||||
};
|
||||
}
|
||||
|
||||
const builtInExecution = await submitBrowserInput("/new");
|
||||
assert.equal(builtInExecution.outcome.kind, "rpc");
|
||||
assert.equal(builtInExecution.status, 200);
|
||||
assert.equal(builtInExecution.body.command, "new_session");
|
||||
|
||||
const builtInSurface = await submitBrowserInput("/model");
|
||||
assert.equal(builtInSurface.outcome.kind, "surface");
|
||||
assert.equal(builtInSurface.outcome.surface, "model");
|
||||
assert.equal(builtInSurface.status, null);
|
||||
|
||||
const builtInNameSurface = await submitBrowserInput("/name Ship It");
|
||||
assert.equal(builtInNameSurface.outcome.kind, "surface");
|
||||
assert.equal(builtInNameSurface.outcome.surface, "name");
|
||||
assert.equal(builtInNameSurface.status, null);
|
||||
|
||||
const builtInReject = await submitBrowserInput("/share");
|
||||
assert.equal(builtInReject.outcome.kind, "reject");
|
||||
assert.match(builtInReject.notice ?? "", /blocked instead of falling through to the model/i);
|
||||
assert.equal(builtInReject.status, null);
|
||||
|
||||
// /gsd status is now a browser surface (S02), verify that
|
||||
const gsdSurface = await submitBrowserInput("/gsd status");
|
||||
assert.equal(gsdSurface.outcome.kind, "surface");
|
||||
assert.equal(gsdSurface.outcome.surface, "gsd-status");
|
||||
assert.equal(gsdSurface.status, null);
|
||||
|
||||
// /gsd auto is a passthrough subcommand — reaches the bridge as a prompt
|
||||
const gsdPrompt = await submitBrowserInput("/gsd auto");
|
||||
assert.equal(gsdPrompt.outcome.kind, "prompt");
|
||||
assert.equal(gsdPrompt.status, 200);
|
||||
assert.equal(gsdPrompt.body.command, "prompt");
|
||||
|
||||
const sentTypes = bridgeCommands.map((command) => command.type);
|
||||
assert.deepEqual(
|
||||
sentTypes.filter((type) => type !== "get_state"),
|
||||
["new_session", "prompt"],
|
||||
"only browser-executable slash commands should reach the live bridge; built-in surfaces/rejects must stay out of prompt text",
|
||||
);
|
||||
const promptCommand = bridgeCommands.find((command) => command.type === "prompt");
|
||||
assert.equal(promptCommand?.message, "/gsd auto", "GSD passthrough commands must stay on the extension prompt path");
|
||||
});
|
||||
|
|
|
|||
|
|
@ -295,7 +295,7 @@ function configureBridgeRuntime(
|
|||
}
|
||||
|
||||
|
||||
test("successful browser onboarding restarts the stale bridge child and unlocks the first prompt", async () => {
|
||||
test("successful browser onboarding restarts the stale bridge child and unlocks the first prompt", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
const harness = configureBridgeRuntime(fixture, authStorage);
|
||||
|
|
@ -304,65 +304,65 @@ test("successful browser onboarding restarts the stale bridge child and unlocks
|
|||
validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }),
|
||||
});
|
||||
|
||||
try {
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
assert.equal(harness.generations[0]?.authVisibleAtStart, false);
|
||||
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "should stay locked" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423);
|
||||
const blockedPayload = (await blockedPrompt.json()) as any;
|
||||
assert.equal(blockedPayload.code, "onboarding_locked");
|
||||
assert.equal(blockedPayload.details.reason, "required_setup");
|
||||
assert.equal(harness.promptCount, 0);
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(validationResponse.status, 200);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, false);
|
||||
assert.equal(validationPayload.onboarding.lockReason, null);
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
assert.equal(harness.generations[1]?.authVisibleAtStart, true);
|
||||
|
||||
const firstPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "first unlocked prompt" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(firstPrompt.status, 200);
|
||||
const firstPromptPayload = (await firstPrompt.json()) as any;
|
||||
assert.equal(firstPromptPayload.success, true);
|
||||
assert.equal(firstPromptPayload.command, "prompt");
|
||||
assert.equal(harness.promptCount, 1);
|
||||
assert.deepEqual(harness.generations[1]?.promptMessages, ["first unlocked prompt"]);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
assert.equal(harness.generations[0]?.authVisibleAtStart, false);
|
||||
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "should stay locked" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423);
|
||||
const blockedPayload = (await blockedPrompt.json()) as any;
|
||||
assert.equal(blockedPayload.code, "onboarding_locked");
|
||||
assert.equal(blockedPayload.details.reason, "required_setup");
|
||||
assert.equal(harness.promptCount, 0);
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(validationResponse.status, 200);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, false);
|
||||
assert.equal(validationPayload.onboarding.lockReason, null);
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
assert.equal(harness.generations[1]?.authVisibleAtStart, true);
|
||||
|
||||
const firstPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "first unlocked prompt" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(firstPrompt.status, 200);
|
||||
const firstPromptPayload = (await firstPrompt.json()) as any;
|
||||
assert.equal(firstPromptPayload.success, true);
|
||||
assert.equal(firstPromptPayload.command, "prompt");
|
||||
assert.equal(harness.promptCount, 1);
|
||||
assert.deepEqual(harness.generations[1]?.promptMessages, ["first unlocked prompt"]);
|
||||
});
|
||||
|
||||
test("refresh failures keep the workspace locked and expose the failed bridge-refresh reason", async () => {
|
||||
test("refresh failures keep the workspace locked and expose the failed bridge-refresh reason", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
const harness = configureBridgeRuntime(fixture, authStorage, { failRestart: true });
|
||||
|
|
@ -371,56 +371,56 @@ test("refresh failures keep the workspace locked and expose the failed bridge-re
|
|||
validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }),
|
||||
});
|
||||
|
||||
try {
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(validationResponse.status, 503);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, true);
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(validationPayload.onboarding.bridgeAuthRefresh.error, /could not attach/i);
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
assert.equal(harness.generations[1]?.authVisibleAtStart, true);
|
||||
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "still locked after failed refresh" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423);
|
||||
const blockedPayload = (await blockedPrompt.json()) as any;
|
||||
assert.equal(blockedPayload.code, "onboarding_locked");
|
||||
assert.equal(blockedPayload.details.reason, "bridge_refresh_failed");
|
||||
assert.equal(harness.promptCount, 0);
|
||||
|
||||
const failedBootResponse = await bootRoute.GET();
|
||||
assert.equal(failedBootResponse.status, 200);
|
||||
const failedBootPayload = (await failedBootResponse.json()) as any;
|
||||
assert.equal(failedBootPayload.onboarding.locked, true);
|
||||
assert.equal(failedBootPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(failedBootPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(failedBootPayload.onboarding.bridgeAuthRefresh.error, /could not attach/i);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootResponse = await bootRoute.GET();
|
||||
assert.equal(bootResponse.status, 200);
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
new Request("http://localhost/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
assert.equal(validationResponse.status, 503);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, true);
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(validationPayload.onboarding.bridgeAuthRefresh.error, /could not attach/i);
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
assert.equal(harness.generations[1]?.authVisibleAtStart, true);
|
||||
|
||||
const blockedPrompt = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "still locked after failed refresh" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(blockedPrompt.status, 423);
|
||||
const blockedPayload = (await blockedPrompt.json()) as any;
|
||||
assert.equal(blockedPayload.code, "onboarding_locked");
|
||||
assert.equal(blockedPayload.details.reason, "bridge_refresh_failed");
|
||||
assert.equal(harness.promptCount, 0);
|
||||
|
||||
const failedBootResponse = await bootRoute.GET();
|
||||
assert.equal(failedBootResponse.status, 200);
|
||||
const failedBootPayload = (await failedBootResponse.json()) as any;
|
||||
assert.equal(failedBootPayload.onboarding.locked, true);
|
||||
assert.equal(failedBootPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(failedBootPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(failedBootPayload.onboarding.bridgeAuthRefresh.error, /could not attach/i);
|
||||
});
|
||||
|
||||
test("fresh gsd --web browser onboarding stays locked on failed validation and unlocks after a successful retry", async (t) => {
|
||||
|
|
@ -434,76 +434,76 @@ test("fresh gsd --web browser onboarding stays locked on failed validation and u
|
|||
const browserLogPath = join(tempRoot, "browser-open.log")
|
||||
let port: number | null = null
|
||||
|
||||
try {
|
||||
const launch = await launchPackagedWebHost({
|
||||
launchCwd: repoRoot,
|
||||
tempHome,
|
||||
browserLogPath,
|
||||
env: {
|
||||
GSD_WEB_TEST_FAKE_API_KEY_VALIDATION: "1",
|
||||
ANTHROPIC_API_KEY: "",
|
||||
OPENAI_API_KEY: "",
|
||||
GOOGLE_API_KEY: "",
|
||||
},
|
||||
})
|
||||
port = launch.port
|
||||
|
||||
assert.equal(launch.exitCode, 0, `expected the web launcher to exit cleanly:\n${launch.stderr}`)
|
||||
assert.match(launch.stderr, /status=started/, "expected a started diagnostic line on stderr")
|
||||
|
||||
const auth = runtimeAuthHeaders(launch)
|
||||
await waitForHttpOk(`${launch.url}/api/boot`, undefined, auth)
|
||||
|
||||
// 1. Boot reports locked before any credentials are saved
|
||||
const bootBefore = await fetch(`${launch.url}/api/boot`, {
|
||||
method: "GET",
|
||||
headers: { Accept: "application/json", ...auth },
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(bootBefore.ok, true, `expected boot endpoint to respond successfully: ${bootBefore.status}`)
|
||||
const bootBeforePayload = await bootBefore.json() as any
|
||||
assert.equal(bootBeforePayload.onboarding.locked, true)
|
||||
assert.equal(bootBeforePayload.onboarding.lockReason, "required_setup")
|
||||
|
||||
// 2. Invalid key → stays locked with failed validation
|
||||
const invalidValidation = await fetch(`${launch.url}/api/onboarding`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json", Accept: "application/json", ...auth },
|
||||
body: JSON.stringify({ action: "save_api_key", providerId: "openai", apiKey: "invalid-demo-key" }),
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(invalidValidation.status, 422)
|
||||
const invalidPayload = await invalidValidation.json() as any
|
||||
assert.equal(invalidPayload.onboarding.locked, true)
|
||||
assert.equal(invalidPayload.onboarding.lastValidation.status, "failed")
|
||||
assert.match(invalidPayload.onboarding.lastValidation.message ?? "", /rejected/i)
|
||||
|
||||
// 3. Valid key → unlocks
|
||||
const validValidation = await fetch(`${launch.url}/api/onboarding`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json", Accept: "application/json", ...auth },
|
||||
body: JSON.stringify({ action: "save_api_key", providerId: "openai", apiKey: "valid-demo-key" }),
|
||||
signal: AbortSignal.timeout(60_000),
|
||||
})
|
||||
assert.equal(validValidation.status, 200, `expected successful retry to unlock onboarding: ${validValidation.status}`)
|
||||
const validPayload = await validValidation.json() as any
|
||||
assert.equal(validPayload.onboarding.locked, false)
|
||||
assert.equal(validPayload.onboarding.bridgeAuthRefresh.phase, "succeeded")
|
||||
|
||||
// 4. Boot confirms unlocked
|
||||
const bootAfter = await fetch(`${launch.url}/api/boot`, {
|
||||
method: "GET",
|
||||
headers: { Accept: "application/json", ...auth },
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(bootAfter.ok, true)
|
||||
const bootAfterPayload = await bootAfter.json() as any
|
||||
assert.equal(bootAfterPayload.onboarding.locked, false)
|
||||
assert.equal(bootAfterPayload.onboarding.lockReason, null)
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
if (port !== null) {
|
||||
await killProcessOnPort(port)
|
||||
await killProcessOnPort(port)
|
||||
}
|
||||
rmSync(tempRoot, { recursive: true, force: true })
|
||||
}
|
||||
});
|
||||
|
||||
const launch = await launchPackagedWebHost({
|
||||
launchCwd: repoRoot,
|
||||
tempHome,
|
||||
browserLogPath,
|
||||
env: {
|
||||
GSD_WEB_TEST_FAKE_API_KEY_VALIDATION: "1",
|
||||
ANTHROPIC_API_KEY: "",
|
||||
OPENAI_API_KEY: "",
|
||||
GOOGLE_API_KEY: "",
|
||||
},
|
||||
})
|
||||
port = launch.port
|
||||
|
||||
assert.equal(launch.exitCode, 0, `expected the web launcher to exit cleanly:\n${launch.stderr}`)
|
||||
assert.match(launch.stderr, /status=started/, "expected a started diagnostic line on stderr")
|
||||
|
||||
const auth = runtimeAuthHeaders(launch)
|
||||
await waitForHttpOk(`${launch.url}/api/boot`, undefined, auth)
|
||||
|
||||
// 1. Boot reports locked before any credentials are saved
|
||||
const bootBefore = await fetch(`${launch.url}/api/boot`, {
|
||||
method: "GET",
|
||||
headers: { Accept: "application/json", ...auth },
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(bootBefore.ok, true, `expected boot endpoint to respond successfully: ${bootBefore.status}`)
|
||||
const bootBeforePayload = await bootBefore.json() as any
|
||||
assert.equal(bootBeforePayload.onboarding.locked, true)
|
||||
assert.equal(bootBeforePayload.onboarding.lockReason, "required_setup")
|
||||
|
||||
// 2. Invalid key → stays locked with failed validation
|
||||
const invalidValidation = await fetch(`${launch.url}/api/onboarding`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json", Accept: "application/json", ...auth },
|
||||
body: JSON.stringify({ action: "save_api_key", providerId: "openai", apiKey: "invalid-demo-key" }),
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(invalidValidation.status, 422)
|
||||
const invalidPayload = await invalidValidation.json() as any
|
||||
assert.equal(invalidPayload.onboarding.locked, true)
|
||||
assert.equal(invalidPayload.onboarding.lastValidation.status, "failed")
|
||||
assert.match(invalidPayload.onboarding.lastValidation.message ?? "", /rejected/i)
|
||||
|
||||
// 3. Valid key → unlocks
|
||||
const validValidation = await fetch(`${launch.url}/api/onboarding`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json", Accept: "application/json", ...auth },
|
||||
body: JSON.stringify({ action: "save_api_key", providerId: "openai", apiKey: "valid-demo-key" }),
|
||||
signal: AbortSignal.timeout(60_000),
|
||||
})
|
||||
assert.equal(validValidation.status, 200, `expected successful retry to unlock onboarding: ${validValidation.status}`)
|
||||
const validPayload = await validValidation.json() as any
|
||||
assert.equal(validPayload.onboarding.locked, false)
|
||||
assert.equal(validPayload.onboarding.bridgeAuthRefresh.phase, "succeeded")
|
||||
|
||||
// 4. Boot confirms unlocked
|
||||
const bootAfter = await fetch(`${launch.url}/api/boot`, {
|
||||
method: "GET",
|
||||
headers: { Accept: "application/json", ...auth },
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
assert.equal(bootAfter.ok, true)
|
||||
const bootAfterPayload = await bootAfter.json() as any
|
||||
assert.equal(bootAfterPayload.onboarding.locked, false)
|
||||
assert.equal(bootAfterPayload.onboarding.lockReason, null)
|
||||
})
|
||||
|
|
|
|||
|
|
@ -52,20 +52,18 @@ function makeTmpAuth(data: Record<string, unknown> = {}): { authPath: string; cl
|
|||
// 1. resolveSearchProvider — 8 scenarios
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('resolveSearchProvider returns tavily when only TAVILY_API_KEY is set', async () => {
|
||||
test('resolveSearchProvider returns tavily when only TAVILY_API_KEY is set', async (t) => {
|
||||
const { resolveSearchProvider } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
try {
|
||||
withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, () => {
|
||||
// Override preference read to use our temp auth (auto)
|
||||
const result = resolveSearchProvider('auto')
|
||||
assert.equal(result, 'tavily')
|
||||
})
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, () => {
|
||||
// Override preference read to use our temp auth (auto)
|
||||
const result = resolveSearchProvider('auto')
|
||||
assert.equal(result, 'tavily')
|
||||
})
|
||||
})
|
||||
|
||||
test('resolveSearchProvider returns brave when only BRAVE_API_KEY is set', async () => {
|
||||
|
|
@ -148,69 +146,61 @@ test('resolveSearchProvider falls back to other provider when preferred key miss
|
|||
// 2. Preference get/set round-trip
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('getSearchProviderPreference returns auto when no preference stored', async () => {
|
||||
test('getSearchProviderPreference returns auto when no preference stored', async (t) => {
|
||||
const { getSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
try {
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'auto')
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'auto')
|
||||
})
|
||||
|
||||
test('getSearchProviderPreference reads from auth.json via AuthStorage', async () => {
|
||||
test('getSearchProviderPreference reads from auth.json via AuthStorage', async (t) => {
|
||||
const { getSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const { authPath, cleanup } = makeTmpAuth({
|
||||
search_provider: { type: 'api_key', key: 'tavily' },
|
||||
})
|
||||
try {
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'tavily')
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'tavily')
|
||||
})
|
||||
|
||||
test('setSearchProviderPreference writes to auth.json via AuthStorage', async () => {
|
||||
test('setSearchProviderPreference writes to auth.json via AuthStorage', async (t) => {
|
||||
const { getSearchProviderPreference, setSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
try {
|
||||
setSearchProviderPreference('brave', authPath)
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'brave')
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
// Round-trip: change to tavily
|
||||
setSearchProviderPreference('tavily', authPath)
|
||||
assert.equal(getSearchProviderPreference(authPath), 'tavily')
|
||||
setSearchProviderPreference('brave', authPath)
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'brave')
|
||||
|
||||
// Round-trip: change to auto
|
||||
setSearchProviderPreference('auto', authPath)
|
||||
assert.equal(getSearchProviderPreference(authPath), 'auto')
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
// Round-trip: change to tavily
|
||||
setSearchProviderPreference('tavily', authPath)
|
||||
assert.equal(getSearchProviderPreference(authPath), 'tavily')
|
||||
|
||||
// Round-trip: change to auto
|
||||
setSearchProviderPreference('auto', authPath)
|
||||
assert.equal(getSearchProviderPreference(authPath), 'auto')
|
||||
})
|
||||
|
||||
test('getSearchProviderPreference returns auto for invalid stored value', async () => {
|
||||
test('getSearchProviderPreference returns auto for invalid stored value', async (t) => {
|
||||
const { getSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const { authPath, cleanup } = makeTmpAuth({
|
||||
search_provider: { type: 'api_key', key: 'google' },
|
||||
})
|
||||
try {
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'auto', 'invalid stored value falls back to auto')
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
const pref = getSearchProviderPreference(authPath)
|
||||
assert.equal(pref, 'auto', 'invalid stored value falls back to auto')
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
|
|
|||
|
|
@ -49,85 +49,81 @@ test("getExtensionKey normalizes top-level .ts and .js entry names to the same k
|
|||
);
|
||||
});
|
||||
|
||||
test("hasStaleCompiledExtensionSiblings only flags top-level .ts/.js sibling pairs", async () => {
|
||||
test("hasStaleCompiledExtensionSiblings only flags top-level .ts/.js sibling pairs", async (t) => {
|
||||
const { hasStaleCompiledExtensionSiblings } = await import("../resource-loader.ts");
|
||||
const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-"));
|
||||
const extensionsDir = join(tmp, "extensions");
|
||||
|
||||
try {
|
||||
mkdirSync(join(extensionsDir, "gsd"), { recursive: true });
|
||||
writeFileSync(join(extensionsDir, "gsd", "index.ts"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), false);
|
||||
t.after(() => { rmSync(tmp, { recursive: true, force: true }); });
|
||||
|
||||
writeFileSync(join(extensionsDir, "ask-user-questions.js"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), false);
|
||||
mkdirSync(join(extensionsDir, "gsd"), { recursive: true });
|
||||
writeFileSync(join(extensionsDir, "gsd", "index.ts"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), false);
|
||||
|
||||
writeFileSync(join(extensionsDir, "ask-user-questions.ts"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), true);
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
writeFileSync(join(extensionsDir, "ask-user-questions.js"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), false);
|
||||
|
||||
writeFileSync(join(extensionsDir, "ask-user-questions.ts"), "export {};\n");
|
||||
assert.equal(hasStaleCompiledExtensionSiblings(extensionsDir), true);
|
||||
});
|
||||
|
||||
test("buildResourceLoader excludes duplicate top-level pi extensions when bundled resources use .js", async () => {
|
||||
test("buildResourceLoader excludes duplicate top-level pi extensions when bundled resources use .js", async (t) => {
|
||||
const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-home-"));
|
||||
const piExtensionsDir = join(tmp, ".pi", "agent", "extensions");
|
||||
const fakeAgentDir = join(tmp, ".gsd", "agent");
|
||||
const restoreHomeEnv = overrideHomeEnv(tmp);
|
||||
|
||||
try {
|
||||
mkdirSync(piExtensionsDir, { recursive: true });
|
||||
writeFileSync(join(piExtensionsDir, "ask-user-questions.ts"), "export {};\n");
|
||||
writeFileSync(join(piExtensionsDir, "custom-extension.ts"), "export {};\n");
|
||||
|
||||
const { buildResourceLoader } = await import("../resource-loader.ts");
|
||||
const loader = buildResourceLoader(fakeAgentDir) as { additionalExtensionPaths?: string[] };
|
||||
const additionalExtensionPaths = loader.additionalExtensionPaths ?? [];
|
||||
|
||||
assert.equal(
|
||||
additionalExtensionPaths.some((entryPath) => entryPath.endsWith("ask-user-questions.ts")),
|
||||
false,
|
||||
"bundled compiled extensions should suppress duplicate pi top-level .ts siblings",
|
||||
);
|
||||
assert.equal(
|
||||
additionalExtensionPaths.some((entryPath) => entryPath.endsWith("custom-extension.ts")),
|
||||
true,
|
||||
"non-duplicate pi extensions should still load",
|
||||
);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
restoreHomeEnv();
|
||||
rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
mkdirSync(piExtensionsDir, { recursive: true });
|
||||
writeFileSync(join(piExtensionsDir, "ask-user-questions.ts"), "export {};\n");
|
||||
writeFileSync(join(piExtensionsDir, "custom-extension.ts"), "export {};\n");
|
||||
|
||||
const { buildResourceLoader } = await import("../resource-loader.ts");
|
||||
const loader = buildResourceLoader(fakeAgentDir) as { additionalExtensionPaths?: string[] };
|
||||
const additionalExtensionPaths = loader.additionalExtensionPaths ?? [];
|
||||
|
||||
assert.equal(
|
||||
additionalExtensionPaths.some((entryPath) => entryPath.endsWith("ask-user-questions.ts")),
|
||||
false,
|
||||
"bundled compiled extensions should suppress duplicate pi top-level .ts siblings",
|
||||
);
|
||||
assert.equal(
|
||||
additionalExtensionPaths.some((entryPath) => entryPath.endsWith("custom-extension.ts")),
|
||||
true,
|
||||
"non-duplicate pi extensions should still load",
|
||||
);
|
||||
});
|
||||
|
||||
test("initResources prunes stale top-level extension siblings next to bundled compiled extensions", async () => {
|
||||
test("initResources prunes stale top-level extension siblings next to bundled compiled extensions", async (t) => {
|
||||
const { initResources } = await import("../resource-loader.ts");
|
||||
const tmp = mkdtempSync(join(tmpdir(), "gsd-resource-loader-sync-"));
|
||||
const fakeAgentDir = join(tmp, "agent");
|
||||
const bundledTsPath = join(fakeAgentDir, "extensions", "ask-user-questions.ts");
|
||||
const bundledJsPath = join(fakeAgentDir, "extensions", "ask-user-questions.js");
|
||||
|
||||
try {
|
||||
initResources(fakeAgentDir);
|
||||
t.after(() => { rmSync(tmp, { recursive: true, force: true }); });
|
||||
|
||||
const bundledPath = existsSync(bundledJsPath)
|
||||
? bundledJsPath
|
||||
: bundledTsPath;
|
||||
const staleSiblingPath = bundledPath.endsWith(".js")
|
||||
? bundledTsPath
|
||||
: bundledJsPath;
|
||||
initResources(fakeAgentDir);
|
||||
|
||||
assert.equal(existsSync(bundledPath), true, "bundled top-level extension should exist");
|
||||
const bundledPath = existsSync(bundledJsPath)
|
||||
? bundledJsPath
|
||||
: bundledTsPath;
|
||||
const staleSiblingPath = bundledPath.endsWith(".js")
|
||||
? bundledTsPath
|
||||
: bundledJsPath;
|
||||
|
||||
// Simulate a stale opposite-format sibling left from a previous sync/build mismatch.
|
||||
writeFileSync(staleSiblingPath, "export {};\n");
|
||||
assert.equal(existsSync(staleSiblingPath), true);
|
||||
assert.equal(existsSync(bundledPath), true, "bundled top-level extension should exist");
|
||||
|
||||
initResources(fakeAgentDir);
|
||||
// Simulate a stale opposite-format sibling left from a previous sync/build mismatch.
|
||||
writeFileSync(staleSiblingPath, "export {};\n");
|
||||
assert.equal(existsSync(staleSiblingPath), true);
|
||||
|
||||
assert.equal(existsSync(staleSiblingPath), false, "stale top-level sibling should be removed during sync");
|
||||
assert.equal(existsSync(bundledPath), true, "bundled extension should remain after cleanup");
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
initResources(fakeAgentDir);
|
||||
|
||||
assert.equal(existsSync(staleSiblingPath), false, "stale top-level sibling should be removed during sync");
|
||||
assert.equal(existsSync(bundledPath), true, "bundled extension should remain after cleanup");
|
||||
});
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ import { tmpdir } from "node:os";
|
|||
* with a broken import to persist at ~/.gsd/agent/extensions/).
|
||||
*/
|
||||
|
||||
test("resource manifest includes contentHash", async () => {
|
||||
test("resource manifest includes contentHash", async (t) => {
|
||||
// We can't easily call initResources directly because it depends on
|
||||
// module-level resolved paths. Instead, verify the manifest schema
|
||||
// by simulating what writeManagedResourceManifest produces.
|
||||
|
|
@ -25,15 +25,13 @@ test("resource manifest includes contentHash", async () => {
|
|||
const tmpDir = mkdtempSync(join(tmpdir(), "gsd-resource-test-"));
|
||||
const manifestPath = join(tmpDir, "managed-resources.json");
|
||||
|
||||
try {
|
||||
writeFileSync(manifestPath, JSON.stringify(manifest));
|
||||
const read = JSON.parse(readFileSync(manifestPath, "utf-8"));
|
||||
assert.equal(read.gsdVersion, "2.28.0");
|
||||
assert.equal(read.contentHash, "abc123def456");
|
||||
assert.equal(typeof read.syncedAt, "number");
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); });
|
||||
|
||||
writeFileSync(manifestPath, JSON.stringify(manifest));
|
||||
const read = JSON.parse(readFileSync(manifestPath, "utf-8"));
|
||||
assert.equal(read.gsdVersion, "2.28.0");
|
||||
assert.equal(read.contentHash, "abc123def456");
|
||||
assert.equal(typeof read.syncedAt, "number");
|
||||
});
|
||||
|
||||
test("missing contentHash in manifest triggers re-sync (upgrade path)", () => {
|
||||
|
|
|
|||
|
|
@ -116,83 +116,83 @@ async function callSearch(
|
|||
* state (lastSearchKey, consecutiveDupeCount) starts fresh here.
|
||||
*/
|
||||
|
||||
test("search loop guard fires after MAX_CONSECUTIVE_DUPES duplicates", async () => {
|
||||
test("search loop guard fires after MAX_CONSECUTIVE_DUPES duplicates", async (t) => {
|
||||
process.env.BRAVE_API_KEY = "test-key-loop-guard";
|
||||
delete process.env.TAVILY_API_KEY;
|
||||
delete process.env.OLLAMA_API_KEY;
|
||||
const restoreFetch = mockFetch(makeBraveResponse());
|
||||
|
||||
try {
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
assert.ok(tool, "search tool should be registered");
|
||||
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Calls 1–3: below threshold, should return search results (not an error)
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
const result = await callSearch(execute, "loop test query", `call-${i}`);
|
||||
assert.notEqual(result.isError, true, `call ${i} should not trigger loop guard`);
|
||||
}
|
||||
|
||||
// Call 4: hits the threshold — guard fires
|
||||
const result4 = await callSearch(execute, "loop test query", "call-4");
|
||||
assert.equal(result4.isError, true, "call 4 should trigger the loop guard");
|
||||
assert.equal(result4.details?.errorKind, "search_loop");
|
||||
assert.ok(
|
||||
result4.content[0].text.includes("Search loop detected"),
|
||||
"error message should mention search loop"
|
||||
);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
restoreFetch();
|
||||
restoreSearchEnv();
|
||||
});
|
||||
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
assert.ok(tool, "search tool should be registered");
|
||||
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Calls 1–3: below threshold, should return search results (not an error)
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
const result = await callSearch(execute, "loop test query", `call-${i}`);
|
||||
assert.notEqual(result.isError, true, `call ${i} should not trigger loop guard`);
|
||||
}
|
||||
|
||||
// Call 4: hits the threshold — guard fires
|
||||
const result4 = await callSearch(execute, "loop test query", "call-4");
|
||||
assert.equal(result4.isError, true, "call 4 should trigger the loop guard");
|
||||
assert.equal(result4.details?.errorKind, "search_loop");
|
||||
assert.ok(
|
||||
result4.content[0].text.includes("Search loop detected"),
|
||||
"error message should mention search loop"
|
||||
);
|
||||
});
|
||||
|
||||
test("search loop guard resets at session_start boundary", async () => {
|
||||
test("search loop guard resets at session_start boundary", async (t) => {
|
||||
process.env.BRAVE_API_KEY = "test-key-loop-guard-session";
|
||||
delete process.env.TAVILY_API_KEY;
|
||||
delete process.env.OLLAMA_API_KEY;
|
||||
const restoreFetch = mockFetch(makeBraveResponse());
|
||||
const query = "session boundary query";
|
||||
|
||||
try {
|
||||
const pi = createMockPI();
|
||||
const mockCtx = {
|
||||
hasUI: false,
|
||||
ui: { notify() {} },
|
||||
};
|
||||
searchExtension(pi as any);
|
||||
await pi.fire("session_start", {}, mockCtx);
|
||||
|
||||
const tool = pi.getRegisteredTool();
|
||||
assert.ok(tool, "search tool should be registered");
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Trigger guard in session 1
|
||||
for (let i = 1; i <= 4; i++) {
|
||||
await callSearch(execute, query, `s1-call-${i}`);
|
||||
}
|
||||
const guardResult = await callSearch(execute, query, "s1-call-5");
|
||||
assert.equal(guardResult.isError, true, "session 1 should be guarded");
|
||||
assert.equal(guardResult.details?.errorKind, "search_loop");
|
||||
|
||||
// New session should clear guard state
|
||||
await pi.fire("session_start", {}, mockCtx);
|
||||
const firstCallSession2 = await callSearch(execute, query, "s2-call-1");
|
||||
assert.notEqual(
|
||||
firstCallSession2.isError,
|
||||
true,
|
||||
"first identical query in a new session should not be blocked by prior session state",
|
||||
);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
restoreFetch();
|
||||
restoreSearchEnv();
|
||||
});
|
||||
|
||||
const pi = createMockPI();
|
||||
const mockCtx = {
|
||||
hasUI: false,
|
||||
ui: { notify() {} },
|
||||
};
|
||||
searchExtension(pi as any);
|
||||
await pi.fire("session_start", {}, mockCtx);
|
||||
|
||||
const tool = pi.getRegisteredTool();
|
||||
assert.ok(tool, "search tool should be registered");
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Trigger guard in session 1
|
||||
for (let i = 1; i <= 4; i++) {
|
||||
await callSearch(execute, query, `s1-call-${i}`);
|
||||
}
|
||||
const guardResult = await callSearch(execute, query, "s1-call-5");
|
||||
assert.equal(guardResult.isError, true, "session 1 should be guarded");
|
||||
assert.equal(guardResult.details?.errorKind, "search_loop");
|
||||
|
||||
// New session should clear guard state
|
||||
await pi.fire("session_start", {}, mockCtx);
|
||||
const firstCallSession2 = await callSearch(execute, query, "s2-call-1");
|
||||
assert.notEqual(
|
||||
firstCallSession2.isError,
|
||||
true,
|
||||
"first identical query in a new session should not be blocked by prior session state",
|
||||
);
|
||||
});
|
||||
|
||||
test("search loop guard stays armed after firing — subsequent duplicates immediately re-trigger (#1671)", async () => {
|
||||
test("search loop guard stays armed after firing — subsequent duplicates immediately re-trigger (#1671)", async (t) => {
|
||||
process.env.BRAVE_API_KEY = "test-key-loop-guard-2";
|
||||
delete process.env.TAVILY_API_KEY;
|
||||
delete process.env.OLLAMA_API_KEY;
|
||||
|
|
@ -201,42 +201,42 @@ test("search loop guard stays armed after firing — subsequent duplicates immed
|
|||
// Use a unique query so module-level state from previous test doesn't interfere
|
||||
const query = "persistent loop query";
|
||||
|
||||
try {
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Exhaust the initial window (calls 1–3 succeed, call 4 fires guard)
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await callSearch(execute, query, `call-${i}`);
|
||||
}
|
||||
const guardFirst = await callSearch(execute, query, "call-4");
|
||||
assert.equal(guardFirst.isError, true, "call 4 should trigger the loop guard");
|
||||
|
||||
// Key regression test: call 5 (and beyond) must ALSO trigger the guard.
|
||||
// The original bug reset state on trigger, so call 5 was treated as a fresh
|
||||
// first search and the loop restarted.
|
||||
const guardSecond = await callSearch(execute, query, "call-5");
|
||||
assert.equal(
|
||||
guardSecond.isError, true,
|
||||
"call 5 should STILL trigger the loop guard (guard must stay armed after firing)"
|
||||
);
|
||||
assert.equal(guardSecond.details?.errorKind, "search_loop");
|
||||
|
||||
// Call 6 as well — guard should keep firing
|
||||
const guardThird = await callSearch(execute, query, "call-6");
|
||||
assert.equal(
|
||||
guardThird.isError, true,
|
||||
"call 6 should STILL trigger the loop guard"
|
||||
);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
restoreFetch();
|
||||
restoreSearchEnv();
|
||||
});
|
||||
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Exhaust the initial window (calls 1–3 succeed, call 4 fires guard)
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await callSearch(execute, query, `call-${i}`);
|
||||
}
|
||||
const guardFirst = await callSearch(execute, query, "call-4");
|
||||
assert.equal(guardFirst.isError, true, "call 4 should trigger the loop guard");
|
||||
|
||||
// Key regression test: call 5 (and beyond) must ALSO trigger the guard.
|
||||
// The original bug reset state on trigger, so call 5 was treated as a fresh
|
||||
// first search and the loop restarted.
|
||||
const guardSecond = await callSearch(execute, query, "call-5");
|
||||
assert.equal(
|
||||
guardSecond.isError, true,
|
||||
"call 5 should STILL trigger the loop guard (guard must stay armed after firing)"
|
||||
);
|
||||
assert.equal(guardSecond.details?.errorKind, "search_loop");
|
||||
|
||||
// Call 6 as well — guard should keep firing
|
||||
const guardThird = await callSearch(execute, query, "call-6");
|
||||
assert.equal(
|
||||
guardThird.isError, true,
|
||||
"call 6 should STILL trigger the loop guard"
|
||||
);
|
||||
});
|
||||
|
||||
test("search loop guard resets cleanly when a different query is issued", async () => {
|
||||
test("search loop guard resets cleanly when a different query is issued", async (t) => {
|
||||
process.env.BRAVE_API_KEY = "test-key-loop-guard-3";
|
||||
delete process.env.TAVILY_API_KEY;
|
||||
delete process.env.OLLAMA_API_KEY;
|
||||
|
|
@ -245,25 +245,25 @@ test("search loop guard resets cleanly when a different query is issued", async
|
|||
const queryA = "query alpha reset test";
|
||||
const queryB = "query beta reset test";
|
||||
|
||||
try {
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Trigger guard for queryA
|
||||
for (let i = 1; i <= 4; i++) {
|
||||
await callSearch(execute, queryA, `call-a-${i}`);
|
||||
}
|
||||
|
||||
// Issue a different query — should succeed (resets the duplicate counter)
|
||||
const resultB = await callSearch(execute, queryB, "call-b-1");
|
||||
assert.notEqual(
|
||||
resultB.isError, true,
|
||||
"a different query after guard should not be treated as a loop"
|
||||
);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
restoreFetch();
|
||||
restoreSearchEnv();
|
||||
});
|
||||
|
||||
const pi = createMockPI();
|
||||
registerSearchTool(pi as any);
|
||||
const tool = pi.getRegisteredTool();
|
||||
const execute = tool.execute.bind(tool);
|
||||
|
||||
// Trigger guard for queryA
|
||||
for (let i = 1; i <= 4; i++) {
|
||||
await callSearch(execute, queryA, `call-a-${i}`);
|
||||
}
|
||||
|
||||
// Issue a different query — should succeed (resets the duplicate counter)
|
||||
const resultB = await callSearch(execute, queryB, "call-b-1");
|
||||
assert.notEqual(
|
||||
resultB.isError, true,
|
||||
"a different query after guard should not be treated as a loop"
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -118,79 +118,73 @@ async function loadCommand(): Promise<CapturedCommand> {
|
|||
// 1. Direct arg — tavily
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('direct arg "tavily" sets preference and notifies', async () => {
|
||||
test('direct arg "tavily" sets preference and notifies', async (t) => {
|
||||
const { setSearchProviderPreference, getSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const cmd = await loadCommand()
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
|
||||
try {
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, async () => {
|
||||
// Pre-set to auto so we can verify the change
|
||||
setSearchProviderPreference('auto', authPath)
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('tavily', ctx)
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, async () => {
|
||||
// Pre-set to auto so we can verify the change
|
||||
setSearchProviderPreference('auto', authPath)
|
||||
|
||||
// No select UI shown
|
||||
assert.equal(ctx.ui.selectCalls.length, 0, 'should not show select UI for direct arg')
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('tavily', ctx)
|
||||
|
||||
// Notification sent
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1, 'should notify once')
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to tavily/, 'notification should confirm provider set')
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: tavily/, 'notification should show effective provider')
|
||||
})
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
// No select UI shown
|
||||
assert.equal(ctx.ui.selectCalls.length, 0, 'should not show select UI for direct arg')
|
||||
|
||||
// Notification sent
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1, 'should notify once')
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to tavily/, 'notification should confirm provider set')
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: tavily/, 'notification should show effective provider')
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 2. Direct arg — brave
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('direct arg "brave" sets preference and notifies', async () => {
|
||||
test('direct arg "brave" sets preference and notifies', async (t) => {
|
||||
const cmd = await loadCommand()
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
|
||||
try {
|
||||
await withEnv({ TAVILY_API_KEY: undefined, BRAVE_API_KEY: 'BSA-test' }, async () => {
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('brave', ctx)
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
assert.equal(ctx.ui.selectCalls.length, 0)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to brave/)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: brave/)
|
||||
})
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
await withEnv({ TAVILY_API_KEY: undefined, BRAVE_API_KEY: 'BSA-test' }, async () => {
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('brave', ctx)
|
||||
|
||||
assert.equal(ctx.ui.selectCalls.length, 0)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to brave/)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: brave/)
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 3. Direct arg — auto
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('direct arg "auto" sets preference and notifies', async () => {
|
||||
test('direct arg "auto" sets preference and notifies', async (t) => {
|
||||
const cmd = await loadCommand()
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
|
||||
try {
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: 'BSA-test' }, async () => {
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('auto', ctx)
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
assert.equal(ctx.ui.selectCalls.length, 0)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to auto/)
|
||||
// auto with both keys → tavily
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: tavily/)
|
||||
})
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: 'BSA-test' }, async () => {
|
||||
const ctx = makeMockCtx()
|
||||
await cmd.handler('auto', ctx)
|
||||
|
||||
assert.equal(ctx.ui.selectCalls.length, 0)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 1)
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Search provider set to auto/)
|
||||
// auto with both keys → tavily
|
||||
assert.match(ctx.ui.notifyCalls[0].message, /Effective provider: tavily/)
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
|
@ -227,29 +221,27 @@ test('no arg shows select UI with 3 options, user picks brave', async () => {
|
|||
// 5. Cancel (select returns undefined) — no side effects
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
test('cancel (select returns undefined) produces no side effects', async () => {
|
||||
test('cancel (select returns undefined) produces no side effects', async (t) => {
|
||||
const { getSearchProviderPreference, setSearchProviderPreference } = await import(
|
||||
'../resources/extensions/search-the-web/provider.ts'
|
||||
)
|
||||
const cmd = await loadCommand()
|
||||
const { authPath, cleanup } = makeTmpAuth()
|
||||
|
||||
try {
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, async () => {
|
||||
setSearchProviderPreference('tavily', authPath)
|
||||
t.after(() => { cleanup() });
|
||||
|
||||
// selectReturn = undefined simulates Esc
|
||||
const ctx = makeMockCtx(undefined)
|
||||
await cmd.handler('', ctx)
|
||||
await withEnv({ TAVILY_API_KEY: 'tvly-test', BRAVE_API_KEY: undefined }, async () => {
|
||||
setSearchProviderPreference('tavily', authPath)
|
||||
|
||||
// Select was called
|
||||
assert.equal(ctx.ui.selectCalls.length, 1)
|
||||
// No notification (no side effects)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 0, 'cancel should produce no notification')
|
||||
})
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
// selectReturn = undefined simulates Esc
|
||||
const ctx = makeMockCtx(undefined)
|
||||
await cmd.handler('', ctx)
|
||||
|
||||
// Select was called
|
||||
assert.equal(ctx.ui.selectCalls.length, 1)
|
||||
// No notification (no side effects)
|
||||
assert.equal(ctx.ui.notifyCalls.length, 0, 'cancel should produce no notification')
|
||||
})
|
||||
})

// ═══════════════════════════════════════════════════════════════════════════

@ -83,120 +83,120 @@ function mockFetch(responseBody: unknown, status = 200) {
// Test: executeTavilySearch produces correct CachedSearchResult shape
// =============================================================================

test("executeTavilySearch sends POST to Tavily API and produces CachedSearchResult", async () => {
test("executeTavilySearch sends POST to Tavily API and produces CachedSearchResult", async (t) => {
// Set TAVILY_API_KEY for this test
const origKey = process.env.TAVILY_API_KEY;
process.env.TAVILY_API_KEY = "tvly-test-key-12345";

const { captured, restore } = mockFetch(makeTavilyResponse());

try {
// Dynamic import to get the module-level function
// We need to call it through the module — but executeTavilySearch is not exported.
// Instead, we test through the tool's execute path by importing the module fresh.
// Since executeTavilySearch is a private function, we test it indirectly through
// the request captured by our mock fetch.

// Import the normalization helpers to verify the mapping
const { normalizeTavilyResult } = await import("../resources/extensions/search-the-web/tavily.ts");

// Simulate what executeTavilySearch does: build request, call fetch, map response
const requestBody: Record<string, unknown> = {
query: "test query",
max_results: 10,
search_depth: "basic",
};

const response = await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer tvly-test-key-12345",
},
body: JSON.stringify(requestBody),
});

const data = await response.json() as { results: Array<{ title: string; url: string; content: string; score: number; published_date?: string }> };

// Verify request shape
assert.equal(captured.url, "https://api.tavily.com/search", "request URL");
assert.equal(captured.method, "POST", "HTTP method");
assert.equal(captured.headers?.["Content-Type"], "application/json", "Content-Type header");
assert.equal(captured.headers?.["Authorization"], "Bearer tvly-test-key-12345", "Authorization header");
assert.deepEqual(captured.body, requestBody, "request body");

// Verify response mapping
const mapped = data.results.map(normalizeTavilyResult);
assert.equal(mapped.length, 2);
assert.equal(mapped[0].title, "First Result");
assert.equal(mapped[0].url, "https://example.com/first");
assert.equal(mapped[0].description, "Description of first result.");
assert.ok(mapped[0].age, "Published date should produce an age string");
assert.equal(mapped[1].title, "Second Result");
assert.equal(mapped[1].age, undefined, "No published_date → no age");
} finally {
t.after(() => {
restore();
if (origKey !== undefined) process.env.TAVILY_API_KEY = origKey;
else delete process.env.TAVILY_API_KEY;
}
});

// Dynamic import to get the module-level function
// We need to call it through the module — but executeTavilySearch is not exported.
// Instead, we test through the tool's execute path by importing the module fresh.
// Since executeTavilySearch is a private function, we test it indirectly through
// the request captured by our mock fetch.

// Import the normalization helpers to verify the mapping
const { normalizeTavilyResult } = await import("../resources/extensions/search-the-web/tavily.ts");

// Simulate what executeTavilySearch does: build request, call fetch, map response
const requestBody: Record<string, unknown> = {
query: "test query",
max_results: 10,
search_depth: "basic",
};

const response = await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer tvly-test-key-12345",
},
body: JSON.stringify(requestBody),
});

const data = await response.json() as { results: Array<{ title: string; url: string; content: string; score: number; published_date?: string }> };

// Verify request shape
assert.equal(captured.url, "https://api.tavily.com/search", "request URL");
assert.equal(captured.method, "POST", "HTTP method");
assert.equal(captured.headers?.["Content-Type"], "application/json", "Content-Type header");
assert.equal(captured.headers?.["Authorization"], "Bearer tvly-test-key-12345", "Authorization header");
assert.deepEqual(captured.body, requestBody, "request body");

// Verify response mapping
const mapped = data.results.map(normalizeTavilyResult);
assert.equal(mapped.length, 2);
assert.equal(mapped[0].title, "First Result");
assert.equal(mapped[0].url, "https://example.com/first");
assert.equal(mapped[0].description, "Description of first result.");
assert.ok(mapped[0].age, "Published date should produce an age string");
assert.equal(mapped[1].title, "Second Result");
assert.equal(mapped[1].age, undefined, "No published_date → no age");
});
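These assertions lean on the mockFetch helper named in the hunk header, which swaps out globalThis.fetch and records the outgoing request; its body is not part of this diff. A plausible sketch of a helper with the captured/restore shape the test relies on (any field beyond those asserted above is an assumption):

// Hypothetical reconstruction of the helper used above; the real file may differ.
function mockFetch(responseBody: unknown, status = 200) {
  const captured: {
    url?: string;
    method?: string;
    headers?: Record<string, string>;
    body?: Record<string, unknown>;
  } = {};
  const originalFetch = globalThis.fetch;

  globalThis.fetch = (async (url: string | URL, init?: RequestInit) => {
    captured.url = String(url);
    captured.method = init?.method;
    captured.headers = init?.headers as Record<string, string> | undefined;
    captured.body = init?.body ? JSON.parse(String(init.body)) : undefined;
    return new Response(JSON.stringify(responseBody), { status });
  }) as typeof fetch;

  // restore() puts the real fetch back; the tests call it from t.after().
  return { captured, restore: () => { globalThis.fetch = originalFetch; } };
}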

// =============================================================================
// Test: Provider branching — resolveSearchProvider returns correct provider
// =============================================================================

test("resolveSearchProvider returns 'tavily' when TAVILY_API_KEY is set and BRAVE_API_KEY is not", () => {
test("resolveSearchProvider returns 'tavily' when TAVILY_API_KEY is set and BRAVE_API_KEY is not", (t) => {
const origTavily = process.env.TAVILY_API_KEY;
const origBrave = process.env.BRAVE_API_KEY;

process.env.TAVILY_API_KEY = "tvly-test-key";
delete process.env.BRAVE_API_KEY;

try {
const provider = resolveSearchProvider();
assert.equal(provider, "tavily");
} finally {
t.after(() => {
if (origTavily !== undefined) process.env.TAVILY_API_KEY = origTavily;
else delete process.env.TAVILY_API_KEY;
if (origBrave !== undefined) process.env.BRAVE_API_KEY = origBrave;
else delete process.env.BRAVE_API_KEY;
}
});

const provider = resolveSearchProvider();
assert.equal(provider, "tavily");
});

test("resolveSearchProvider returns 'brave' when only BRAVE_API_KEY is set", () => {
test("resolveSearchProvider returns 'brave' when only BRAVE_API_KEY is set", (t) => {
const origTavily = process.env.TAVILY_API_KEY;
const origBrave = process.env.BRAVE_API_KEY;

delete process.env.TAVILY_API_KEY;
process.env.BRAVE_API_KEY = "BSA-test-key";

try {
const provider = resolveSearchProvider();
assert.equal(provider, "brave");
} finally {
t.after(() => {
if (origTavily !== undefined) process.env.TAVILY_API_KEY = origTavily;
else delete process.env.TAVILY_API_KEY;
if (origBrave !== undefined) process.env.BRAVE_API_KEY = origBrave;
else delete process.env.BRAVE_API_KEY;
}
});

const provider = resolveSearchProvider();
assert.equal(provider, "brave");
});

test("resolveSearchProvider returns null when neither key is set", () => {
test("resolveSearchProvider returns null when neither key is set", (t) => {
const origTavily = process.env.TAVILY_API_KEY;
const origBrave = process.env.BRAVE_API_KEY;

delete process.env.TAVILY_API_KEY;
delete process.env.BRAVE_API_KEY;

try {
const provider = resolveSearchProvider();
assert.equal(provider, null);
} finally {
t.after(() => {
if (origTavily !== undefined) process.env.TAVILY_API_KEY = origTavily;
else delete process.env.TAVILY_API_KEY;
if (origBrave !== undefined) process.env.BRAVE_API_KEY = origBrave;
else delete process.env.BRAVE_API_KEY;
}
});

const provider = resolveSearchProvider();
assert.equal(provider, null);
})
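Together the three tests above fix the provider-selection contract: Tavily when TAVILY_API_KEY is set, Brave when only BRAVE_API_KEY is present, otherwise null. The resolver itself is not shown in this diff; a sketch consistent with just that behaviour (the shipped version may also consult the saved preference exercised earlier in this file) would be:

// Assumed shape, limited to the env-var branching these tests describe.
type SearchProvider = "tavily" | "brave";

function resolveSearchProvider(): SearchProvider | null {
  if (process.env.TAVILY_API_KEY) return "tavily";
  if (process.env.BRAVE_API_KEY) return "brave";
  return null;
}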

// =============================================================================

@ -245,7 +245,7 @@ test("no-key error message contains both TAVILY_API_KEY and BRAVE_API_KEY", () =
// Test: Tavily answer mapping — answer field flows through as summary text
// =============================================================================

test("Tavily answer field maps to summaryText in CachedSearchResult", async () => {
test("Tavily answer field maps to summaryText in CachedSearchResult", async (t) => {
const origKey = process.env.TAVILY_API_KEY;
process.env.TAVILY_API_KEY = "tvly-test-key";

@ -255,29 +255,29 @@ test("Tavily answer field maps to summaryText in CachedSearchResult", async () =

const { captured, restore } = mockFetch(responseWithAnswer);

try {
const response = await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: { "Content-Type": "application/json", "Authorization": "Bearer tvly-test-key" },
body: JSON.stringify({ query: "what is typescript", max_results: 10, search_depth: "basic", include_answer: true }),
});

const data = await response.json() as { answer?: string };

// Verify the answer is present
assert.equal(data.answer, "TypeScript is a typed superset of JavaScript that compiles to plain JavaScript.");

// Verify the request included include_answer
assert.equal(captured.body?.include_answer, true);

// The answer should flow to summaryText (not summarizerKey)
const summaryText = data.answer || undefined;
assert.ok(summaryText, "Answer should be truthy and used as summaryText");
} finally {
t.after(() => {
restore();
if (origKey !== undefined) process.env.TAVILY_API_KEY = origKey;
else delete process.env.TAVILY_API_KEY;
}
});

const response = await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: { "Content-Type": "application/json", "Authorization": "Bearer tvly-test-key" },
body: JSON.stringify({ query: "what is typescript", max_results: 10, search_depth: "basic", include_answer: true }),
});

const data = await response.json() as { answer?: string };

// Verify the answer is present
assert.equal(data.answer, "TypeScript is a typed superset of JavaScript that compiles to plain JavaScript.");

// Verify the request included include_answer
assert.equal(captured.body?.include_answer, true);

// The answer should flow to summaryText (not summarizerKey)
const summaryText = data.answer || undefined;
assert.ok(summaryText, "Answer should be truthy and used as summaryText");
});

// =============================================================================

@ -305,40 +305,40 @@ test("freshness='week' maps to time_range='week' in Tavily request body", () =>
// Test: Domain mapping — include_domains, not site: prefix
// =============================================================================

test("Tavily domain filter uses include_domains, not site: prefix in query", async () => {
test("Tavily domain filter uses include_domains, not site: prefix in query", async (t) => {
const origKey = process.env.TAVILY_API_KEY;
process.env.TAVILY_API_KEY = "tvly-test-key";

const { captured, restore } = mockFetch(makeTavilyResponse());

try {
// Simulate what executeTavilySearch builds for domain filtering
const domain = "example.com";
const query = "typescript tutorial";

const requestBody: Record<string, unknown> = {
query, // Note: NO site: prefix
max_results: 10,
search_depth: "basic",
include_domains: [domain],
};

await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: { "Content-Type": "application/json", "Authorization": "Bearer tvly-test-key" },
body: JSON.stringify(requestBody),
});

// Verify domain passed as include_domains, not in query
assert.deepEqual(captured.body?.include_domains, ["example.com"]);
assert.equal(captured.body?.query, "typescript tutorial", "Query must NOT contain site: prefix for Tavily");
assert.ok(
!(captured.body?.query as string).includes("site:"),
"Query must not include site: prefix for Tavily path"
);
} finally {
t.after(() => {
restore();
if (origKey !== undefined) process.env.TAVILY_API_KEY = origKey;
else delete process.env.TAVILY_API_KEY;
}
});

// Simulate what executeTavilySearch builds for domain filtering
const domain = "example.com";
const query = "typescript tutorial";

const requestBody: Record<string, unknown> = {
query, // Note: NO site: prefix
max_results: 10,
search_depth: "basic",
include_domains: [domain],
};

await globalThis.fetch("https://api.tavily.com/search", {
method: "POST",
headers: { "Content-Type": "application/json", "Authorization": "Bearer tvly-test-key" },
body: JSON.stringify(requestBody),
});

// Verify domain passed as include_domains, not in query
assert.deepEqual(captured.body?.include_domains, ["example.com"]);
assert.equal(captured.body?.query, "typescript tutorial", "Query must NOT contain site: prefix for Tavily");
assert.ok(
!(captured.body?.query as string).includes("site:"),
"Query must not include site: prefix for Tavily path"
);
});

@ -26,24 +26,24 @@ function scanContent(
const dir = mkdtempSync(join(tmpdir(), "secret-scan-test-"));
try {
// Initialize a git repo so `git diff --cached` works
spawnSync("git", ["init"], { cwd: dir });
spawnSync("git", ["config", "user.email", "test@test.com"], { cwd: dir });
spawnSync("git", ["config", "user.name", "Test"], { cwd: dir });
spawnSync("git", ["init"], { cwd: dir });
spawnSync("git", ["config", "user.email", "test@test.com"], { cwd: dir });
spawnSync("git", ["config", "user.name", "Test"], { cwd: dir });

// Write and stage the file
const filePath = join(dir, filename);
const parentDir = join(dir, ...filename.split("/").slice(0, -1));
if (filename.includes("/")) {
mkdirSync(parentDir, { recursive: true });
}
writeFileSync(filePath, content);
spawnSync("git", ["add", filename], { cwd: dir });
// Write and stage the file
const filePath = join(dir, filename);
const parentDir = join(dir, ...filename.split("/").slice(0, -1));
if (filename.includes("/")) {
mkdirSync(parentDir, { recursive: true });
}
writeFileSync(filePath, content);
spawnSync("git", ["add", filename], { cwd: dir });

const result = spawnSync("bash", [scanScript], {
cwd: dir,
encoding: "utf-8",
env: { ...process.env, TERM: "dumb" },
});
const result = spawnSync("bash", [scanScript], {
cwd: dir,
encoding: "utf-8",
env: { ...process.env, TERM: "dumb" },
});

return {
status: result.status ?? 1,

@ -153,19 +153,17 @@ test("skips package-lock.json", { skip: isWindows }, () => {
assert.equal(result.status, 0, `should pass (lockfile skip): ${result.stdout}`);
});

test("reports no files cleanly", { skip: isWindows }, () => {
test("reports no files cleanly", { skip: isWindows }, (t) => {
const dir = mkdtempSync(join(tmpdir(), "secret-scan-empty-"));
try {
spawnSync("git", ["init"], { cwd: dir });
const result = spawnSync("bash", [scanScript], {
cwd: dir,
encoding: "utf-8",
});
assert.equal(result.status, 0);
assert.match(result.stdout, /no files to scan/);
} finally {
rmSync(dir, { recursive: true, force: true });
}
t.after(() => { rmSync(dir, { recursive: true, force: true }); });

spawnSync("git", ["init"], { cwd: dir });
const result = spawnSync("bash", [scanScript], {
cwd: dir,
encoding: "utf-8",
});
assert.equal(result.status, 0);
assert.match(result.stdout, /no files to scan/);
});

// ── Multiple findings ────────────────────────────────────────────────

@ -186,34 +184,32 @@ test("reports multiple secrets in one file", { skip: isWindows }, () => {

// ── CI mode (--diff) ─────────────────────────────────────────────────

test("CI mode scans diff against ref", { skip: isWindows }, () => {
test("CI mode scans diff against ref", { skip: isWindows }, (t) => {
const dir = mkdtempSync(join(tmpdir(), "secret-scan-ci-"));
try {
spawnSync("git", ["init"], { cwd: dir });
spawnSync("git", ["config", "user.email", "test@test.com"], { cwd: dir });
spawnSync("git", ["config", "user.name", "Test"], { cwd: dir });
t.after(() => { rmSync(dir, { recursive: true, force: true }); });

// Create initial commit
writeFileSync(join(dir, "clean.ts"), "const x = 1;");
spawnSync("git", ["add", "."], { cwd: dir });
spawnSync("git", ["commit", "-m", "init"], { cwd: dir });
spawnSync("git", ["init"], { cwd: dir });
spawnSync("git", ["config", "user.email", "test@test.com"], { cwd: dir });
spawnSync("git", ["config", "user.name", "Test"], { cwd: dir });

// Add a file with a secret on a new commit
writeFileSync(
join(dir, "leaked.ts"),
'const key = "AKIAIOSFODNN7EXAMPLE";',
);
spawnSync("git", ["add", "."], { cwd: dir });
spawnSync("git", ["commit", "-m", "add leak"], { cwd: dir });
// Create initial commit
writeFileSync(join(dir, "clean.ts"), "const x = 1;");
spawnSync("git", ["add", "."], { cwd: dir });
spawnSync("git", ["commit", "-m", "init"], { cwd: dir });

const result = spawnSync("bash", [scanScript, "--diff", "HEAD~1"], {
cwd: dir,
encoding: "utf-8",
});
// Add a file with a secret on a new commit
writeFileSync(
join(dir, "leaked.ts"),
'const key = "AKIAIOSFODNN7EXAMPLE";',
);
spawnSync("git", ["add", "."], { cwd: dir });
spawnSync("git", ["commit", "-m", "add leak"], { cwd: dir });

assert.equal(result.status, 1, `CI mode should detect: ${result.stdout}`);
assert.match(result.stdout, /AWS Access Key/);
} finally {
rmSync(dir, { recursive: true, force: true });
}
const result = spawnSync("bash", [scanScript, "--diff", "HEAD~1"], {
cwd: dir,
encoding: "utf-8",
});

assert.equal(result.status, 1, `CI mode should detect: ${result.stdout}`);
assert.match(result.stdout, /AWS Access Key/);
});

@ -8,7 +8,7 @@ test("isCmuxTerminal detects cmux env vars", () => {
assert.equal(isCmuxTerminal({ TERM_PROGRAM: "ghostty" } as NodeJS.ProcessEnv), false);
});

test("detectCapabilities treats cmux as kitty-capable", () => {
test("detectCapabilities treats cmux as kitty-capable", (t) => {
const originalEnv = process.env;
process.env = {
...originalEnv,

@ -16,15 +16,15 @@ test("detectCapabilities treats cmux as kitty-capable", () => {
CMUX_SURFACE_ID: "surface:2",
TERM_PROGRAM: "ghostty",
};
try {
resetCapabilitiesCache();
assert.deepEqual(detectCapabilities(), {
images: "kitty",
trueColor: true,
hyperlinks: true,
});
} finally {
t.after(() => {
process.env = originalEnv;
resetCapabilitiesCache();
}
});

resetCapabilitiesCache();
assert.deepEqual(detectCapabilities(), {
images: "kitty",
trueColor: true,
hyperlinks: true,
});
});

@ -16,18 +16,16 @@ function makeExecutable(dir: string, name: string, content = "#!/bin/sh\nexit 0\
return file;
}

test("resolveToolFromPath finds fd via fdfind fallback", () => {
test("resolveToolFromPath finds fd via fdfind fallback", (t) => {
const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-resolve-"));
try {
makeExecutable(tmp, "fdfind");
const resolved = resolveToolFromPath("fd", tmp);
assert.equal(resolved, join(tmp, "fdfind"));
} finally {
rmSync(tmp, { recursive: true, force: true });
}
t.after(() => { rmSync(tmp, { recursive: true, force: true }); });

makeExecutable(tmp, "fdfind");
const resolved = resolveToolFromPath("fd", tmp);
assert.equal(resolved, join(tmp, "fdfind"));
});

test("ensureManagedTools provisions fd and rg into managed bin dir", () => {
test("ensureManagedTools provisions fd and rg into managed bin dir", (t) => {
const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-provision-"));
const sourceBin = join(tmp, "source-bin");
const targetBin = join(tmp, "target-bin");

@ -35,23 +33,21 @@ test("ensureManagedTools provisions fd and rg into managed bin dir", () => {
mkdirSync(sourceBin, { recursive: true });
mkdirSync(targetBin, { recursive: true });

try {
makeExecutable(sourceBin, "fdfind");
makeExecutable(sourceBin, "rg");
t.after(() => { rmSync(tmp, { recursive: true, force: true }); });

const provisioned = ensureManagedTools(targetBin, sourceBin);
makeExecutable(sourceBin, "fdfind");
makeExecutable(sourceBin, "rg");

assert.equal(provisioned.length, 2);
assert.ok(existsSync(join(targetBin, FD_TARGET)));
assert.ok(existsSync(join(targetBin, RG_TARGET)));
assert.ok(lstatSync(join(targetBin, FD_TARGET)).isSymbolicLink() || lstatSync(join(targetBin, FD_TARGET)).isFile());
assert.ok(lstatSync(join(targetBin, RG_TARGET)).isSymbolicLink() || lstatSync(join(targetBin, RG_TARGET)).isFile());
} finally {
rmSync(tmp, { recursive: true, force: true });
}
const provisioned = ensureManagedTools(targetBin, sourceBin);

assert.equal(provisioned.length, 2);
assert.ok(existsSync(join(targetBin, FD_TARGET)));
assert.ok(existsSync(join(targetBin, RG_TARGET)));
assert.ok(lstatSync(join(targetBin, FD_TARGET)).isSymbolicLink() || lstatSync(join(targetBin, FD_TARGET)).isFile());
assert.ok(lstatSync(join(targetBin, RG_TARGET)).isSymbolicLink() || lstatSync(join(targetBin, RG_TARGET)).isFile());
});

test("ensureManagedTools copies executable when symlink target already exists as a broken link", () => {
test("ensureManagedTools copies executable when symlink target already exists as a broken link", (t) => {
const tmp = mkdtempSync(join(tmpdir(), "gsd-tool-bootstrap-copy-"));
const sourceBin = join(tmp, "source-bin");
const targetBin = join(tmp, "target-bin");

@ -60,17 +56,15 @@ test("ensureManagedTools copies executable when symlink target already exists as
mkdirSync(sourceBin, { recursive: true });
mkdirSync(targetBin, { recursive: true });

try {
makeExecutable(sourceBin, "fdfind", "#!/bin/sh\necho fd\n");
makeExecutable(sourceBin, "rg", "#!/bin/sh\necho rg\n");
symlinkSync(join(tmp, "missing-target"), targetFd);
t.after(() => { rmSync(tmp, { recursive: true, force: true }); });

const provisioned = ensureManagedTools(targetBin, sourceBin);
makeExecutable(sourceBin, "fdfind", "#!/bin/sh\necho fd\n");
makeExecutable(sourceBin, "rg", "#!/bin/sh\necho rg\n");
symlinkSync(join(tmp, "missing-target"), targetFd);

assert.equal(provisioned.length, 2);
assert.ok(lstatSync(targetFd).isFile(), "fd fallback should replace broken symlink with a copied file");
assert.match(readFileSync(targetFd, "utf8"), /echo fd/);
} finally {
rmSync(tmp, { recursive: true, force: true });
}
const provisioned = ensureManagedTools(targetBin, sourceBin);

assert.equal(provisioned.length, 2);
assert.ok(lstatSync(targetFd).isFile(), "fd fallback should replace broken symlink with a copied file");
assert.match(readFileSync(targetFd, "utf8"), /echo fd/);
});
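The two ensureManagedTools tests above pin down its contract: link or copy fdfind and rg from a source bin into the managed bin dir under the FD_TARGET/RG_TARGET names, and fall back to copying when a broken symlink already occupies the target. A rough sketch of that behaviour (the target names and the symlink-then-copy order are assumptions, not taken from the implementation):

import { copyFileSync, existsSync, lstatSync, rmSync, symlinkSync } from "node:fs";
import { join } from "node:path";

// Illustrative only; the shipped constants and lookup order may differ.
const FD_TARGET = "fd";
const RG_TARGET = "rg";

function ensureManagedTools(targetBin: string, sourceBin: string): string[] {
  const provisioned: string[] = [];
  const pairs: Array<[source: string, target: string]> = [
    ["fdfind", FD_TARGET],
    ["rg", RG_TARGET],
  ];
  for (const [source, target] of pairs) {
    const from = join(sourceBin, source);
    const to = join(targetBin, target);
    if (!existsSync(from)) continue;
    try {
      if (!existsSync(to)) symlinkSync(from, to);
    } catch {
      // a stale entry is already in the way; handled below
    }
    // existsSync follows symlinks, so a dangling link reads as missing:
    // remove it and copy the real executable instead.
    if (!existsSync(to)) {
      rmSync(to, { force: true });
      copyFileSync(from, to);
    }
    if (lstatSync(to).isSymbolicLink() || lstatSync(to).isFile()) provisioned.push(to);
  }
  return provisioned;
}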

@ -33,23 +33,22 @@ function writeRule(dir: string, name: string, frontmatter: string, body: string)
// Project-local rule loading
// ═══════════════════════════════════════════════════════════════════════════

test('loads rule from project .gsd/rules/', () => {
test('loads rule from project .gsd/rules/', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

writeRule(projectDir, 'no-console', 'condition:\n - "console\\.log"', 'Do not use console.log.')
const rules = loadRules(cwd)
const projectRule = rules.find(r => r.name === 'no-console')
assert.ok(projectRule)
assert.deepEqual(projectRule.condition, ['console\\.log'])
assert.equal(projectRule.content, 'Do not use console.log.')
} finally {
cleanup()
}
})

test('parses scope and globs from frontmatter', () => {
test('parses scope and globs from frontmatter', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

writeRule(
projectDir,
'scoped-rule',

@ -61,69 +60,56 @@ test('parses scope and globs from frontmatter', () => {
assert.ok(rule)
assert.deepEqual(rule.scope, ['tool:edit', 'text'])
assert.deepEqual(rule.globs, ['*.ts'])
} finally {
cleanup()
}
})

test('skips files without valid frontmatter', () => {
test('skips files without valid frontmatter', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

mkdirSync(projectDir, { recursive: true })
writeFileSync(join(projectDir, 'broken.md'), 'No frontmatter here.')
const rules = loadRules(cwd)
assert.equal(rules.filter(r => r.name === 'broken').length, 0)
} finally {
cleanup()
}
})

test('skips rules with no condition', () => {
test('skips rules with no condition', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

writeRule(projectDir, 'no-condition', 'scope:\n - "text"', 'Missing condition field.')
const rules = loadRules(cwd)
assert.equal(rules.filter(r => r.name === 'no-condition').length, 0)
} finally {
cleanup()
}
})

test('returns empty array when .gsd/rules/ does not exist', () => {
test('returns empty array when .gsd/rules/ does not exist', (t) => {
const { cwd, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

// cwd exists but no .gsd/rules/ dir
const rules = loadRules(cwd)
// May include global rules from homedir — just verify no crash
assert.ok(Array.isArray(rules))
} finally {
cleanup()
}
})

test('loads multiple rules from same directory', () => {
test('loads multiple rules from same directory', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

writeRule(projectDir, 'rule-a', 'condition:\n - "alpha"', 'Alpha rule.')
writeRule(projectDir, 'rule-b', 'condition:\n - "beta"', 'Beta rule.')
const rules = loadRules(cwd)
const names = rules.map(r => r.name)
assert.ok(names.includes('rule-a'))
assert.ok(names.includes('rule-b'))
} finally {
cleanup()
}
})

test('handles quoted values in frontmatter', () => {
test('handles quoted values in frontmatter', (t) => {
const { cwd, projectDir, cleanup } = makeTmpProject()
try {
t.after(() => { cleanup() });

writeRule(projectDir, 'quoted', 'condition:\n - "console\\.log"\n - \'debugger\'', 'Quoted values.')
const rules = loadRules(cwd)
const rule = rules.find(r => r.name === 'quoted')
assert.ok(rule)
assert.deepEqual(rule.condition, ['console\\.log', 'debugger'])
} finally {
cleanup()
}
})
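All of these cases go through the writeRule helper named in the hunk header, which drops a markdown rule file into .gsd/rules/. Its body is not in this diff; assuming the frontmatter is fenced with ---, it could be as small as:

import { mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";

// Assumed reconstruction; only the file location and frontmatter fencing are implied by the tests.
function writeRule(dir: string, name: string, frontmatter: string, body: string): void {
  mkdirSync(dir, { recursive: true });
  writeFileSync(join(dir, `${name}.md`), `---\n${frontmatter}\n---\n${body}\n`);
}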

@ -41,51 +41,43 @@ test('compareSemver handles versions with different segment counts', () => {
// readUpdateCache / writeUpdateCache
// ---------------------------------------------------------------------------

test('readUpdateCache returns null for nonexistent file', () => {
test('readUpdateCache returns null for nonexistent file', (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-'))
try {
const result = readUpdateCache(join(tmp, 'nonexistent'))
assert.equal(result, null)
} finally {
rmSync(tmp, { recursive: true, force: true })
}
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });

const result = readUpdateCache(join(tmp, 'nonexistent'))
assert.equal(result, null)
})

test('readUpdateCache returns null for malformed JSON', () => {
test('readUpdateCache returns null for malformed JSON', (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-'))
try {
const cachePath = join(tmp, '.update-check')
writeFileSync(cachePath, 'not json')
const result = readUpdateCache(cachePath)
assert.equal(result, null)
} finally {
rmSync(tmp, { recursive: true, force: true })
}
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });

const cachePath = join(tmp, '.update-check')
writeFileSync(cachePath, 'not json')
const result = readUpdateCache(cachePath)
assert.equal(result, null)
})

test('writeUpdateCache + readUpdateCache round-trips correctly', () => {
test('writeUpdateCache + readUpdateCache round-trips correctly', (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-'))
try {
const cachePath = join(tmp, '.update-check')
const cache = { lastCheck: Date.now(), latestVersion: '3.0.0' }
writeUpdateCache(cache, cachePath)
const result = readUpdateCache(cachePath)
assert.deepEqual(result, cache)
} finally {
rmSync(tmp, { recursive: true, force: true })
}
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });

const cachePath = join(tmp, '.update-check')
const cache = { lastCheck: Date.now(), latestVersion: '3.0.0' }
writeUpdateCache(cache, cachePath)
const result = readUpdateCache(cachePath)
assert.deepEqual(result, cache)
})

test('writeUpdateCache creates parent directories', () => {
test('writeUpdateCache creates parent directories', (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-cache-'))
try {
const cachePath = join(tmp, 'nested', 'dir', '.update-check')
writeUpdateCache({ lastCheck: Date.now(), latestVersion: '1.0.0' }, cachePath)
const raw = readFileSync(cachePath, 'utf-8')
assert.ok(raw.includes('1.0.0'))
} finally {
rmSync(tmp, { recursive: true, force: true })
}
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });

const cachePath = join(tmp, 'nested', 'dir', '.update-check')
writeUpdateCache({ lastCheck: Date.now(), latestVersion: '1.0.0' }, cachePath)
const raw = readFileSync(cachePath, 'utf-8')
assert.ok(raw.includes('1.0.0'))
})
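The four tests above fully describe the cache helpers: null for a missing or malformed file, a JSON round-trip, and parent directories created on write. A minimal sketch that satisfies exactly those cases (anything about the on-disk format beyond plain JSON is an assumption):

import { mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";

interface UpdateCache { lastCheck: number; latestVersion: string }

// Minimal sketch, assuming the cache is a single JSON document on disk.
function readUpdateCache(cachePath: string): UpdateCache | null {
  try {
    return JSON.parse(readFileSync(cachePath, "utf-8")) as UpdateCache;
  } catch {
    return null; // missing file or malformed JSON
  }
}

function writeUpdateCache(cache: UpdateCache, cachePath: string): void {
  mkdirSync(dirname(cachePath), { recursive: true });
  writeFileSync(cachePath, JSON.stringify(cache));
}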

// ---------------------------------------------------------------------------

@ -108,105 +100,105 @@ function startMockRegistry(responseBody: object, statusCode = 200): Promise<{ ur
})
}
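startMockRegistry, whose signature appears in the hunk header and whose closing lines are shown above, stands in for the npm registry: it serves one JSON body at a fixed status and returns the listening URL plus an async close(). A sketch consistent with how the tests below use registry.url and registry.close():

import { createServer } from "node:http";

// Assumed reconstruction of the truncated helper above.
function startMockRegistry(responseBody: object, statusCode = 200): Promise<{ url: string; close: () => Promise<void> }> {
  return new Promise((resolve) => {
    const server = createServer((_req, res) => {
      res.writeHead(statusCode, { "Content-Type": "application/json" });
      res.end(JSON.stringify(responseBody));
    });
    server.listen(0, "127.0.0.1", () => {
      const { port } = server.address() as { port: number };
      resolve({
        url: `http://127.0.0.1:${port}`,
        close: () => new Promise<void>((r) => server.close(() => r())),
      });
    });
  });
}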

test('checkForUpdates calls onUpdate when newer version is available', async () => {
test('checkForUpdates calls onUpdate when newer version is available', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const registry = await startMockRegistry({ version: '99.0.0' })
try {
let called = false
let reportedCurrent = ''
let reportedLatest = ''

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: (current, latest) => {
called = true
reportedCurrent = current
reportedLatest = latest
},
})

assert.ok(called, 'onUpdate should have been called')
assert.equal(reportedCurrent, '1.0.0')
assert.equal(reportedLatest, '99.0.0')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false
let reportedCurrent = ''
let reportedLatest = ''

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: (current, latest) => {
called = true
reportedCurrent = current
reportedLatest = latest
},
})

assert.ok(called, 'onUpdate should have been called')
assert.equal(reportedCurrent, '1.0.0')
assert.equal(reportedLatest, '99.0.0')
})

test('checkForUpdates does not call onUpdate when already on latest', async () => {
test('checkForUpdates does not call onUpdate when already on latest', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const registry = await startMockRegistry({ version: '1.0.0' })
try {
let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when versions match')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when versions match')
})

test('checkForUpdates does not call onUpdate when current is ahead', async () => {
test('checkForUpdates does not call onUpdate when current is ahead', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const registry = await startMockRegistry({ version: '1.0.0' })
try {
let called = false

await checkForUpdates({
currentVersion: '2.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when current is ahead')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false

await checkForUpdates({
currentVersion: '2.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when current is ahead')
})

test('checkForUpdates writes cache after successful fetch', async () => {
test('checkForUpdates writes cache after successful fetch', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const cachePath = join(tmp, '.update-check')
const registry = await startMockRegistry({ version: '5.0.0' })
try {
await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => {},
})

const cache = readUpdateCache(cachePath)
assert.ok(cache, 'cache should exist after fetch')
assert.equal(cache!.latestVersion, '5.0.0')
assert.ok(cache!.lastCheck > 0)
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => {},
})

const cache = readUpdateCache(cachePath)
assert.ok(cache, 'cache should exist after fetch')
assert.equal(cache!.latestVersion, '5.0.0')
assert.ok(cache!.lastCheck > 0)
})

test('checkForUpdates uses cache and skips fetch when checked recently', async () => {
test('checkForUpdates uses cache and skips fetch when checked recently', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const cachePath = join(tmp, '.update-check')
// Write a fresh cache entry

@ -214,114 +206,112 @@ test('checkForUpdates uses cache and skips fetch when checked recently', async (

// Start server that would return a different version — should NOT be reached
const registry = await startMockRegistry({ version: '20.0.0' })
try {
let reportedLatest = ''

await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
registryUrl: registry.url,
checkIntervalMs: 60 * 60 * 1000, // 1 hour
fetchTimeoutMs: 5000,
onUpdate: (_current, latest) => { reportedLatest = latest },
})

// Should use cached version (10.0.0), not the server's (20.0.0)
assert.equal(reportedLatest, '10.0.0')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let reportedLatest = ''

await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
registryUrl: registry.url,
checkIntervalMs: 60 * 60 * 1000, // 1 hour
fetchTimeoutMs: 5000,
onUpdate: (_current, latest) => { reportedLatest = latest },
})

// Should use cached version (10.0.0), not the server's (20.0.0)
assert.equal(reportedLatest, '10.0.0')
})

test('checkForUpdates skips notification when cache is fresh and versions match', async () => {
test('checkForUpdates skips notification when cache is fresh and versions match', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const cachePath = join(tmp, '.update-check')
writeUpdateCache({ lastCheck: Date.now(), latestVersion: '1.0.0' }, cachePath)

try {
let called = false
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });

await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
checkIntervalMs: 60 * 60 * 1000,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})
let called = false

assert.ok(!called, 'onUpdate should not be called when cached version matches current')
} finally {
rmSync(tmp, { recursive: true, force: true })
}
await checkForUpdates({
currentVersion: '1.0.0',
cachePath,
checkIntervalMs: 60 * 60 * 1000,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when cached version matches current')
})

test('checkForUpdates handles server error gracefully', async () => {
test('checkForUpdates handles server error gracefully', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const registry = await startMockRegistry({}, 500)
try {
let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called on server error')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called on server error')
})

test('checkForUpdates handles network timeout gracefully', async () => {
test('checkForUpdates handles network timeout gracefully', async (t) => {
// Start a server that never responds
const server = createServer(() => { /* intentionally never respond */ })
await new Promise<void>((resolve) => server.listen(0, '127.0.0.1', resolve))
const addr = server.address() as { port: number }
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))

try {
let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: `http://127.0.0.1:${addr.port}`,
checkIntervalMs: 0,
fetchTimeoutMs: 500, // Very short timeout
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called on timeout')
} finally {
t.after(async () => {
await new Promise<void>((r) => server.close(() => r()))
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: `http://127.0.0.1:${addr.port}`,
checkIntervalMs: 0,
fetchTimeoutMs: 500, // Very short timeout
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called on timeout')
})

test('checkForUpdates handles missing version field in response', async () => {
test('checkForUpdates handles missing version field in response', async (t) => {
const tmp = mkdtempSync(join(tmpdir(), 'gsd-update-'))
const registry = await startMockRegistry({ name: 'gsd-pi' }) // no version field
try {
let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when response has no version')
} finally {
t.after(async () => {
await registry.close()
rmSync(tmp, { recursive: true, force: true })
}
});

let called = false

await checkForUpdates({
currentVersion: '1.0.0',
cachePath: join(tmp, '.update-check'),
registryUrl: registry.url,
checkIntervalMs: 0,
fetchTimeoutMs: 5000,
onUpdate: () => { called = true },
})

assert.ok(!called, 'onUpdate should not be called when response has no version')
})

@ -259,7 +259,7 @@ async function readSseEvents(response: Response, count: number): Promise<any[]>
return events;
}
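readSseEvents, also truncated here, drains the SSE response body until it has parsed the requested number of data: frames. One way it could be written, assuming each event arrives as a single JSON data: line (the real helper may buffer and parse differently):

// Hedged sketch of the truncated helper above.
async function readSseEvents(response: Response, count: number): Promise<any[]> {
  const reader = response.body!.getReader();
  const decoder = new TextDecoder();
  const events: any[] = [];
  let buffer = "";
  while (events.length < count) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    let newlineIndex: number;
    while ((newlineIndex = buffer.indexOf("\n")) !== -1 && events.length < count) {
      const line = buffer.slice(0, newlineIndex).trim();
      buffer = buffer.slice(newlineIndex + 1);
      if (line.startsWith("data:")) events.push(JSON.parse(line.slice(5).trim()));
    }
  }
  return events;
}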

test("/api/boot returns current-project workspace data, resumable sessions, onboarding seam, and bridge snapshot", async () => {
test("/api/boot returns current-project workspace data, resumable sessions, onboarding seam, and bridge snapshot", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-boot", "Resume Me");
const harness = createHarness((command, current) => {

@ -304,39 +304,39 @@ test("/api/boot returns current-project workspace data, resumable sessions, onbo
getOnboardingNeeded: () => false,
});

try {
const response = await bootRoute.GET();
assert.equal(response.status, 200);
const payload = await response.json() as any;

assert.equal(payload.project.cwd, fixture.projectCwd);
assert.equal(payload.project.sessionsDir, fixture.sessionsDir);
assert.equal(payload.workspace.active.milestoneId, "M001");
assert.equal(payload.workspace.active.sliceId, "S01");
assert.equal(payload.workspace.active.taskId, "T01");
assert.equal(payload.onboardingNeeded, false);
assert.equal(payload.resumableSessions.length, 1);
assert.equal(payload.resumableSessions[0].id, "sess-boot");
assert.equal(payload.resumableSessions[0].path, sessionPath);
assert.equal(payload.resumableSessions[0].isActive, true);
assert.equal("firstMessage" in payload.resumableSessions[0], false);
assert.equal("allMessagesText" in payload.resumableSessions[0], false);
assert.equal("parentSessionPath" in payload.resumableSessions[0], false);
assert.equal("depth" in payload.resumableSessions[0], false);
assert.equal(payload.bridge.phase, "ready");
assert.equal(payload.bridge.activeSessionId, "sess-boot");
assert.equal(payload.bridge.sessionState.sessionId, "sess-boot");
assert.equal(payload.bridge.sessionState.autoRetryEnabled, false);
assert.equal(payload.bridge.sessionState.retryInProgress, false);
assert.equal(payload.bridge.sessionState.retryAttempt, 0);
assert.equal(harness.spawnCalls, 1);
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const response = await bootRoute.GET();
assert.equal(response.status, 200);
const payload = await response.json() as any;

assert.equal(payload.project.cwd, fixture.projectCwd);
assert.equal(payload.project.sessionsDir, fixture.sessionsDir);
assert.equal(payload.workspace.active.milestoneId, "M001");
assert.equal(payload.workspace.active.sliceId, "S01");
assert.equal(payload.workspace.active.taskId, "T01");
assert.equal(payload.onboardingNeeded, false);
assert.equal(payload.resumableSessions.length, 1);
assert.equal(payload.resumableSessions[0].id, "sess-boot");
assert.equal(payload.resumableSessions[0].path, sessionPath);
assert.equal(payload.resumableSessions[0].isActive, true);
assert.equal("firstMessage" in payload.resumableSessions[0], false);
assert.equal("allMessagesText" in payload.resumableSessions[0], false);
assert.equal("parentSessionPath" in payload.resumableSessions[0], false);
assert.equal("depth" in payload.resumableSessions[0], false);
assert.equal(payload.bridge.phase, "ready");
assert.equal(payload.bridge.activeSessionId, "sess-boot");
assert.equal(payload.bridge.sessionState.sessionId, "sess-boot");
assert.equal(payload.bridge.sessionState.autoRetryEnabled, false);
assert.equal(payload.bridge.sessionState.retryInProgress, false);
assert.equal(payload.bridge.sessionState.retryAttempt, 0);
assert.equal(harness.spawnCalls, 1);
});

test("/api/boot uses the authoritative auto helper by default and stays snapshot-shaped", async () => {
test("/api/boot uses the authoritative auto helper by default and stays snapshot-shaped", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-auto", "Authoritative Auto");
const authoritativeAuto = {

@ -394,27 +394,27 @@ test("/api/boot uses the authoritative auto helper by default and stays snapshot
getOnboardingNeeded: () => false,
});

try {
const response = await bootRoute.GET();
assert.equal(response.status, 200);
const payload = await response.json() as any;

assert.deepEqual(
Object.keys(payload).sort(),
["auto", "bridge", "onboarding", "onboardingNeeded", "project", "projectDetection", "resumableSessions", "workspace"],
"/api/boot must remain snapshot-shaped while auto truth becomes authoritative",
);
assert.deepEqual(payload.auto, authoritativeAuto, "default boot path should read authoritative auto dashboard data");
assert.notEqual(payload.auto.startTime, 0, "authoritative auto helper must replace the all-zero fallback payload");
assert.equal("recovery" in payload, false, "/api/boot should not grow a recovery diagnostics payload in T01");
assert.equal("liveState" in payload, false, "/api/boot should not expose live invalidation payloads directly");
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const response = await bootRoute.GET();
assert.equal(response.status, 200);
const payload = await response.json() as any;

assert.deepEqual(
Object.keys(payload).sort(),
["auto", "bridge", "onboarding", "onboardingNeeded", "project", "projectDetection", "resumableSessions", "workspace"],
"/api/boot must remain snapshot-shaped while auto truth becomes authoritative",
);
assert.deepEqual(payload.auto, authoritativeAuto, "default boot path should read authoritative auto dashboard data");
assert.notEqual(payload.auto.startTime, 0, "authoritative auto helper must replace the all-zero fallback payload");
assert.equal("recovery" in payload, false, "/api/boot should not grow a recovery diagnostics payload in T01");
assert.equal("liveState" in payload, false, "/api/boot should not expose live invalidation payloads directly");
});

test("bridge service is a singleton for the project runtime and /api/session/command forwards real RPC responses", async () => {
test("bridge service is a singleton for the project runtime and /api/session/command forwards real RPC responses", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-shared", "Shared Session");
const harness = createHarness((command, current) => {

@ -459,40 +459,40 @@ test("bridge service is a singleton for the project runtime and /api/session/com
getOnboardingNeeded: () => false,
});

try {
const serviceA = bridge.getProjectBridgeService();
const serviceB = bridge.getProjectBridgeService();
assert.strictEqual(serviceA, serviceB);

const first = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "get_state" }),
}),
);
const firstBody = await first.json() as any;
assert.equal(first.status, 200);
assert.equal(firstBody.success, true);
assert.equal(firstBody.command, "get_state");
assert.equal(firstBody.data.sessionId, "sess-shared");

const second = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "get_state" }),
}),
);
const secondBody = await second.json() as any;
assert.equal(second.status, 200);
assert.equal(secondBody.data.sessionId, "sess-shared");
assert.equal(harness.spawnCalls, 1);
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const serviceA = bridge.getProjectBridgeService();
const serviceB = bridge.getProjectBridgeService();
assert.strictEqual(serviceA, serviceB);

const first = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "get_state" }),
}),
);
const firstBody = await first.json() as any;
assert.equal(first.status, 200);
assert.equal(firstBody.success, true);
assert.equal(firstBody.command, "get_state");
assert.equal(firstBody.data.sessionId, "sess-shared");

const second = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "get_state" }),
}),
);
const secondBody = await second.json() as any;
assert.equal(second.status, 200);
assert.equal(secondBody.data.sessionId, "sess-shared");
assert.equal(harness.spawnCalls, 1);
});

test("/api/session/events streams bridge status, agent events, and extension_ui_request payloads over SSE", async () => {
test("/api/session/events streams bridge status, agent events, and extension_ui_request payloads over SSE", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-events", "Events Session");
const harness = createHarness((command, current) => {

@ -537,38 +537,38 @@ test("/api/session/events streams bridge status, agent events, and extension_ui_
getOnboardingNeeded: () => false,
});

try {
const controller = new AbortController();
const response = await eventsRoute.GET(
new Request("http://localhost/api/session/events", { signal: controller.signal }),
);

harness.emit({ type: "agent_start" });
harness.emit({
type: "extension_ui_request",
id: "ui-1",
method: "confirm",
title: "Need approval",
message: "Continue?",
});

const events = await readSseEvents(response, 3);
assert.equal(events[0].type, "bridge_status");
assert.equal(events[0].bridge.connectionCount, 1);
assert.ok(events.some((event) => event.type === "agent_start"));
assert.ok(events.some((event) => event.type === "extension_ui_request"));

assert.equal(bridge.getProjectBridgeService().getSnapshot().connectionCount, 1);
controller.abort();
await waitForMicrotasks();
assert.equal(bridge.getProjectBridgeService().getSnapshot().connectionCount, 0);
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const controller = new AbortController();
const response = await eventsRoute.GET(
new Request("http://localhost/api/session/events", { signal: controller.signal }),
);

harness.emit({ type: "agent_start" });
harness.emit({
type: "extension_ui_request",
id: "ui-1",
method: "confirm",
title: "Need approval",
message: "Continue?",
});

const events = await readSseEvents(response, 3);
assert.equal(events[0].type, "bridge_status");
assert.equal(events[0].bridge.connectionCount, 1);
assert.ok(events.some((event) => event.type === "agent_start"));
assert.ok(events.some((event) => event.type === "extension_ui_request"));

assert.equal(bridge.getProjectBridgeService().getSnapshot().connectionCount, 1);
controller.abort();
await waitForMicrotasks();
assert.equal(bridge.getProjectBridgeService().getSnapshot().connectionCount, 0);
});

test("bridge command/runtime failures are inspectable and redact secret material", async () => {
test("bridge command/runtime failures are inspectable and redact secret material", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-failure", "Failure Session");

@ -631,33 +631,33 @@ test("bridge command/runtime failures are inspectable and redact secret material
getOnboardingNeeded: () => false,
});

try {
const response = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "bash", command: "echo test" }),
}),
);
const body = await response.json() as any;

assert.equal(response.status, 502);
assert.equal(body.success, false);
assert.match(body.error, /authentication failed/i);
assert.doesNotMatch(body.error, /sk-test-command-secret-9999/);

harness.stderr("fatal runtime error: sk-after-attach-12345");
harness.exit(1);
await waitForMicrotasks();

const snapshot = bridge.getProjectBridgeService().getSnapshot();
assert.equal(snapshot.phase, "failed");
assert.equal(snapshot.lastError?.afterSessionAttachment, true);
assert.doesNotMatch(snapshot.lastError?.message ?? "", /sk-after-attach-12345|sk-test-command-secret-9999/);
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
onboarding.resetOnboardingServiceForTests();
fixture.cleanup();
}
});

const response = await commandRoute.POST(
new Request("http://localhost/api/session/command", {
method: "POST",
body: JSON.stringify({ type: "bash", command: "echo test" }),
}),
);
const body = await response.json() as any;

assert.equal(response.status, 502);
assert.equal(body.success, false);
assert.match(body.error, /authentication failed/i);
assert.doesNotMatch(body.error, /sk-test-command-secret-9999/);

harness.stderr("fatal runtime error: sk-after-attach-12345");
harness.exit(1);
await waitForMicrotasks();

const snapshot = bridge.getProjectBridgeService().getSnapshot();
assert.equal(snapshot.phase, "failed");
assert.equal(snapshot.lastError?.afterSessionAttachment, true);
assert.doesNotMatch(snapshot.lastError?.message ?? "", /sk-after-attach-12345|sk-test-command-secret-9999/);
});

// ---------------------------------------------------------------------------

@ -665,7 +665,7 @@ test("bridge command/runtime failures are inspectable and redact secret material
// (Fixes #1936: /api/boot returns 500 when readdirSync is missing)
// ---------------------------------------------------------------------------

test("/api/boot lists sessions from the real filesystem via readdirSync (#1936)", async () => {
test("/api/boot lists sessions from the real filesystem via readdirSync (#1936)", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-fs", "FS Session");
const harness = createHarness((command, current) => {

@ -712,24 +712,24 @@ test("/api/boot lists sessions from the real filesystem via readdirSync (#1936)"
getOnboardingNeeded: () => false,
});

try {
const response = await bootRoute.GET();
assert.equal(response.status, 200, "/api/boot must not return 500 — readdirSync must be available");
const payload = await response.json() as any;

// The real listProjectSessions should have found the session file via readdirSync
assert.ok(
Array.isArray(payload.resumableSessions),
"boot payload must include resumableSessions array",
);
assert.equal(
payload.resumableSessions.length,
1,
"readdirSync-based session listing must find the test session file",
);
assert.equal(payload.resumableSessions[0].id, "sess-fs");
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const response = await bootRoute.GET();
assert.equal(response.status, 200, "/api/boot must not return 500 — readdirSync must be available");
const payload = await response.json() as any;

// The real listProjectSessions should have found the session file via readdirSync
assert.ok(
Array.isArray(payload.resumableSessions),
"boot payload must include resumableSessions array",
);
assert.equal(
payload.resumableSessions.length,
1,
"readdirSync-based session listing must find the test session file",
);
assert.equal(payload.resumableSessions[0].id, "sess-fs");
});

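The same mechanical conversion repeats across the files below: the teardown that used to live in a finally block is registered with node:test's t.after() as soon as the fixture exists, so it also runs when an assertion fails before the end of the test. A minimal sketch of the pattern, assuming only node:test and a throwaway temp directory (the names here are illustrative, not taken from this diff):

import test from "node:test";
import assert from "node:assert/strict";
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

test("temp workspace is cleaned up even when assertions fail", async (t) => {
  // Create the resource, then immediately register its cleanup.
  const workspace = mkdtempSync(join(tmpdir(), "gsd-example-"));
  t.after(() => { rmSync(workspace, { recursive: true, force: true }); });

  // ... exercise the code under test against `workspace` ...
  assert.ok(workspace.includes("gsd-example-"));
});

t.after() also accepts an async callback, which is how the bridge and onboarding reset helpers are awaited in the tests below.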
@ -143,7 +143,7 @@ function createHarness(onCommand: (command: any, harness: ReturnType<typeof crea
return harness;
}

test("/api/bridge-terminal/stream attaches to the main bridge runtime and forwards native terminal output", async () => {
test("/api/bridge-terminal/stream attaches to the main bridge runtime and forwards native terminal output", async (t) => {
const fixture = makeWorkspaceFixture();
const harness = createHarness((command, current) => {
if (command.type === "get_state") {

@ -197,25 +197,25 @@ test("/api/bridge-terminal/stream attaches to the main bridge runtime and forwar
spawn: harness.spawn,
});

try {
const response = await streamRoute.GET(
new Request("http://localhost/api/bridge-terminal/stream?cols=132&rows=41"),
);

const events = await readSseEvents(response, 2);
assert.equal(events[0].type, "connected");
assert.equal(events[1].type, "output");
assert.match(events[1].data, /native main session/);

assert.ok(harness.commands.some((command) => command.type === "terminal_resize" && command.cols === 132 && command.rows === 41));
assert.ok(harness.commands.some((command) => command.type === "terminal_redraw"));
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const response = await streamRoute.GET(
new Request("http://localhost/api/bridge-terminal/stream?cols=132&rows=41"),
);

const events = await readSseEvents(response, 2);
assert.equal(events[0].type, "connected");
assert.equal(events[1].type, "output");
assert.match(events[1].data, /native main session/);

assert.ok(harness.commands.some((command) => command.type === "terminal_resize" && command.cols === 132 && command.rows === 41));
assert.ok(harness.commands.some((command) => command.type === "terminal_redraw"));
});

test("bridge-terminal input and resize routes forward browser terminal traffic onto the authoritative bridge session", async () => {
test("bridge-terminal input and resize routes forward browser terminal traffic onto the authoritative bridge session", async (t) => {
const fixture = makeWorkspaceFixture();
const harness = createHarness((command, current) => {
if (command.type === "get_state") {

@ -266,32 +266,32 @@ test("bridge-terminal input and resize routes forward browser terminal traffic o
spawn: harness.spawn,
});

try {
const inputResponse = await inputRoute.POST(
new Request("http://localhost/api/bridge-terminal/input", {
method: "POST",
body: JSON.stringify({ data: "hello from xterm" }),
}),
);
assert.equal(inputResponse.status, 200);

const resizeResponse = await resizeRoute.POST(
new Request("http://localhost/api/bridge-terminal/resize", {
method: "POST",
body: JSON.stringify({ cols: 140, rows: 48 }),
}),
);
assert.equal(resizeResponse.status, 200);

assert.ok(harness.commands.some((command) => command.type === "terminal_input" && command.data === "hello from xterm"));
assert.ok(harness.commands.some((command) => command.type === "terminal_resize" && command.cols === 140 && command.rows === 48));
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const inputResponse = await inputRoute.POST(
new Request("http://localhost/api/bridge-terminal/input", {
method: "POST",
body: JSON.stringify({ data: "hello from xterm" }),
}),
);
assert.equal(inputResponse.status, 200);

const resizeResponse = await resizeRoute.POST(
new Request("http://localhost/api/bridge-terminal/resize", {
method: "POST",
body: JSON.stringify({ cols: 140, rows: 48 }),
}),
);
assert.equal(resizeResponse.status, 200);

assert.ok(harness.commands.some((command) => command.type === "terminal_input" && command.data === "hello from xterm"));
assert.ok(harness.commands.some((command) => command.type === "terminal_resize" && command.cols === 140 && command.rows === 48));
});

test("session_state_changed from the native main-session TUI refreshes bridge state and emits matching live invalidations", async () => {
test("session_state_changed from the native main-session TUI refreshes bridge state and emits matching live invalidations", async (t) => {
const fixture = makeWorkspaceFixture();
const sessionAPath = join(fixture.sessionsDir, "sess-a.jsonl");
const sessionBPath = join(fixture.sessionsDir, "sess-b.jsonl");

@ -338,30 +338,30 @@ test("session_state_changed from the native main-session TUI refreshes bridge st
spawn: harness.spawn,
});

try {
const service = bridge.getProjectBridgeService();
const unsubscribe = service.subscribe((event) => {
seenEvents.push(event as { type?: string; reason?: string });
});

await service.ensureStarted();
activeSessionId = "sess-b";
activeSessionFile = sessionBPath;
harness.emit({ type: "session_state_changed", reason: "switch_session" });

await waitFor(() => {
const snapshot = service.getSnapshot();
return snapshot.activeSessionId === "sess-b" ? snapshot : null;
});

assert.ok(
seenEvents.some((event) => event.type === "live_state_invalidation" && event.reason === "switch_session"),
"switch_session live_state_invalidation should be emitted when the native TUI changes the active session",
);

unsubscribe();
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests();
fixture.cleanup();
}
});

const service = bridge.getProjectBridgeService();
const unsubscribe = service.subscribe((event) => {
seenEvents.push(event as { type?: string; reason?: string });
});

await service.ensureStarted();
activeSessionId = "sess-b";
activeSessionFile = sessionBPath;
harness.emit({ type: "session_state_changed", reason: "switch_session" });

await waitFor(() => {
const snapshot = service.getSnapshot();
return snapshot.activeSessionId === "sess-b" ? snapshot : null;
});

assert.ok(
seenEvents.some((event) => event.type === "live_state_invalidation" && event.reason === "switch_session"),
"switch_session live_state_invalidation should be emitted when the native TUI changes the active session",
);

unsubscribe();
});

|
|
|||
|
|
@ -17,89 +17,83 @@ function makeFixture(paths: string[]): string {
|
|||
return root;
|
||||
}
|
||||
|
||||
test("resolveGsdCliEntry prefers the built loader for packaged standalone interactive sessions", () => {
|
||||
test("resolveGsdCliEntry prefers the built loader for packaged standalone interactive sessions", (t) => {
|
||||
const packageRoot = makeFixture([
|
||||
"dist/loader.js",
|
||||
"src/loader.ts",
|
||||
"src/resources/extensions/gsd/tests/resolve-ts.mjs",
|
||||
]);
|
||||
|
||||
try {
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-a",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "packaged-standalone",
|
||||
mode: "interactive",
|
||||
});
|
||||
t.after(() => { rmSync(packageRoot, { recursive: true, force: true }); });
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [join(packageRoot, "dist", "loader.js")],
|
||||
cwd: "/tmp/project-a",
|
||||
});
|
||||
} finally {
|
||||
rmSync(packageRoot, { recursive: true, force: true });
|
||||
}
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-a",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "packaged-standalone",
|
||||
mode: "interactive",
|
||||
});
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [join(packageRoot, "dist", "loader.js")],
|
||||
cwd: "/tmp/project-a",
|
||||
});
|
||||
});
|
||||
|
||||
test("resolveGsdCliEntry prefers the source loader for source-dev interactive sessions", () => {
|
||||
test("resolveGsdCliEntry prefers the source loader for source-dev interactive sessions", (t) => {
|
||||
const packageRoot = makeFixture([
|
||||
"dist/loader.js",
|
||||
"src/loader.ts",
|
||||
"src/resources/extensions/gsd/tests/resolve-ts.mjs",
|
||||
]);
|
||||
|
||||
try {
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-b",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "source-dev",
|
||||
mode: "interactive",
|
||||
});
|
||||
t.after(() => { rmSync(packageRoot, { recursive: true, force: true }); });
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [
|
||||
"--import",
|
||||
pathToFileURL(join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs")).href,
|
||||
"--experimental-strip-types",
|
||||
join(packageRoot, "src", "loader.ts"),
|
||||
],
|
||||
cwd: "/tmp/project-b",
|
||||
});
|
||||
} finally {
|
||||
rmSync(packageRoot, { recursive: true, force: true });
|
||||
}
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-b",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "source-dev",
|
||||
mode: "interactive",
|
||||
});
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [
|
||||
"--import",
|
||||
pathToFileURL(join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs")).href,
|
||||
"--experimental-strip-types",
|
||||
join(packageRoot, "src", "loader.ts"),
|
||||
],
|
||||
cwd: "/tmp/project-b",
|
||||
});
|
||||
});
|
||||
|
||||
test("resolveGsdCliEntry appends rpc arguments for bridge sessions", () => {
|
||||
test("resolveGsdCliEntry appends rpc arguments for bridge sessions", (t) => {
|
||||
const packageRoot = makeFixture(["dist/loader.js"]);
|
||||
|
||||
try {
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-c",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "packaged-standalone",
|
||||
mode: "rpc",
|
||||
sessionDir: "/tmp/.gsd/sessions/project-c",
|
||||
});
|
||||
t.after(() => { rmSync(packageRoot, { recursive: true, force: true }); });
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [
|
||||
join(packageRoot, "dist", "loader.js"),
|
||||
"--mode",
|
||||
"rpc",
|
||||
"--continue",
|
||||
"--session-dir",
|
||||
"/tmp/.gsd/sessions/project-c",
|
||||
],
|
||||
cwd: "/tmp/project-c",
|
||||
});
|
||||
} finally {
|
||||
rmSync(packageRoot, { recursive: true, force: true });
|
||||
}
|
||||
const entry = resolveGsdCliEntry({
|
||||
packageRoot,
|
||||
cwd: "/tmp/project-c",
|
||||
execPath: "/custom/node",
|
||||
hostKind: "packaged-standalone",
|
||||
mode: "rpc",
|
||||
sessionDir: "/tmp/.gsd/sessions/project-c",
|
||||
});
|
||||
|
||||
assert.deepEqual(entry, {
|
||||
command: "/custom/node",
|
||||
args: [
|
||||
join(packageRoot, "dist", "loader.js"),
|
||||
"--mode",
|
||||
"rpc",
|
||||
"--continue",
|
||||
"--session-dir",
|
||||
"/tmp/.gsd/sessions/project-c",
|
||||
],
|
||||
cwd: "/tmp/project-c",
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -373,7 +373,7 @@ function routeEvent(state: MinimalLiveState, event: any): MinimalLiveState {
|
|||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("(a) SSE emits extension_ui_request with method 'select' → typed payload with options and allowMultiple", async () => {
|
||||
test("(a) SSE emits extension_ui_request with method 'select' → typed payload with options and allowMultiple", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-ui", "UI Session");
|
||||
const harness = createHarness((command, current) => {
|
||||
|
|
@ -392,46 +392,46 @@ test("(a) SSE emits extension_ui_request with method 'select' → typed payload
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({
|
||||
type: "extension_ui_request",
|
||||
id: "req-select-1",
|
||||
method: "select",
|
||||
title: "Choose a file",
|
||||
options: ["file-a.ts", "file-b.ts", "file-c.ts"],
|
||||
allowMultiple: true,
|
||||
});
|
||||
|
||||
const events = await readSseEvents(response, 2); // bridge_status + the UI request
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
|
||||
const uiEvent = events.find((e) => e.type === "extension_ui_request");
|
||||
assert.ok(uiEvent, "extension_ui_request event received via SSE");
|
||||
assert.equal(uiEvent.id, "req-select-1");
|
||||
assert.equal(uiEvent.method, "select");
|
||||
assert.equal(uiEvent.title, "Choose a file");
|
||||
assert.deepEqual(uiEvent.options, ["file-a.ts", "file-b.ts", "file-c.ts"]);
|
||||
assert.equal(uiEvent.allowMultiple, true);
|
||||
|
||||
// Verify store routing: select is a blocking method → should queue
|
||||
let state = createMinimalLiveState();
|
||||
state = routeEvent(state, uiEvent);
|
||||
assert.equal(state.pendingUiRequests.length, 1);
|
||||
assert.equal(state.pendingUiRequests[0].id, "req-select-1");
|
||||
assert.equal(state.pendingUiRequests[0].method, "select");
|
||||
assert.deepEqual(state.pendingUiRequests[0].options, ["file-a.ts", "file-b.ts", "file-c.ts"]);
|
||||
assert.equal(state.pendingUiRequests[0].allowMultiple, true);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({
|
||||
type: "extension_ui_request",
|
||||
id: "req-select-1",
|
||||
method: "select",
|
||||
title: "Choose a file",
|
||||
options: ["file-a.ts", "file-b.ts", "file-c.ts"],
|
||||
allowMultiple: true,
|
||||
});
|
||||
|
||||
const events = await readSseEvents(response, 2); // bridge_status + the UI request
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
|
||||
const uiEvent = events.find((e) => e.type === "extension_ui_request");
|
||||
assert.ok(uiEvent, "extension_ui_request event received via SSE");
|
||||
assert.equal(uiEvent.id, "req-select-1");
|
||||
assert.equal(uiEvent.method, "select");
|
||||
assert.equal(uiEvent.title, "Choose a file");
|
||||
assert.deepEqual(uiEvent.options, ["file-a.ts", "file-b.ts", "file-c.ts"]);
|
||||
assert.equal(uiEvent.allowMultiple, true);
|
||||
|
||||
// Verify store routing: select is a blocking method → should queue
|
||||
let state = createMinimalLiveState();
|
||||
state = routeEvent(state, uiEvent);
|
||||
assert.equal(state.pendingUiRequests.length, 1);
|
||||
assert.equal(state.pendingUiRequests[0].id, "req-select-1");
|
||||
assert.equal(state.pendingUiRequests[0].method, "select");
|
||||
assert.deepEqual(state.pendingUiRequests[0].options, ["file-a.ts", "file-b.ts", "file-c.ts"]);
|
||||
assert.equal(state.pendingUiRequests[0].allowMultiple, true);
|
||||
});
|
||||
|
||||
test("(b) Multiple concurrent UI requests queue correctly keyed by id", async () => {
|
||||
|
|
@ -480,7 +480,7 @@ test("(b) Multiple concurrent UI requests queue correctly keyed by id", async ()
|
|||
assert.equal(state.pendingUiRequests[3].prefill, "initial text");
|
||||
});
|
||||
|
||||
test("(c) Responding to a UI request posts extension_ui_response with correct id and value to the bridge", async () => {
|
||||
test("(c) Responding to a UI request posts extension_ui_response with correct id and value to the bridge", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-respond", "Respond Session");
|
||||
const harness = createHarness((command, current) => {
|
||||
|
|
@ -499,33 +499,33 @@ test("(c) Responding to a UI request posts extension_ui_response with correct id
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
// Post an extension_ui_response via the command route
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "extension_ui_response", id: "req-42", value: "option-b" }),
|
||||
}),
|
||||
);
|
||||
|
||||
// extension_ui_response returns { ok: true } (202) because it's fire-and-forget
|
||||
assert.equal(response.status, 202);
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Verify the command was written to the bridge's stdin
|
||||
const uiResponseCmd = harness.commands.find((c) => c.type === "extension_ui_response");
|
||||
assert.ok(uiResponseCmd, "extension_ui_response was sent to the bridge");
|
||||
assert.equal(uiResponseCmd.id, "req-42");
|
||||
assert.equal(uiResponseCmd.value, "option-b");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
// Post an extension_ui_response via the command route
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "extension_ui_response", id: "req-42", value: "option-b" }),
|
||||
}),
|
||||
);
|
||||
|
||||
// extension_ui_response returns { ok: true } (202) because it's fire-and-forget
|
||||
assert.equal(response.status, 202);
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Verify the command was written to the bridge's stdin
|
||||
const uiResponseCmd = harness.commands.find((c) => c.type === "extension_ui_response");
|
||||
assert.ok(uiResponseCmd, "extension_ui_response was sent to the bridge");
|
||||
assert.equal(uiResponseCmd.id, "req-42");
|
||||
assert.equal(uiResponseCmd.value, "option-b");
|
||||
});
|
||||
|
||||
test("(d) Dismissing a UI request posts cancelled: true and removes from pending", async () => {
|
||||
test("(d) Dismissing a UI request posts cancelled: true and removes from pending", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-dismiss", "Dismiss Session");
|
||||
const harness = createHarness((command, current) => {
|
||||
|
|
@ -543,48 +543,48 @@ test("(d) Dismissing a UI request posts cancelled: true and removes from pending
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
// Post a cancel response
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "extension_ui_response", id: "req-99", cancelled: true }),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(response.status, 202);
|
||||
await waitForMicrotasks();
|
||||
|
||||
const cancelCmd = harness.commands.find((c) => c.type === "extension_ui_response" && c.cancelled === true);
|
||||
assert.ok(cancelCmd, "cancellation extension_ui_response was sent to the bridge");
|
||||
assert.equal(cancelCmd.id, "req-99");
|
||||
assert.equal(cancelCmd.cancelled, true);
|
||||
|
||||
// Verify store routing: removing from pending queue
|
||||
let state = createMinimalLiveState();
|
||||
state = routeEvent(state, {
|
||||
type: "extension_ui_request",
|
||||
id: "req-99",
|
||||
method: "confirm",
|
||||
title: "Confirm?",
|
||||
message: "Really?",
|
||||
});
|
||||
assert.equal(state.pendingUiRequests.length, 1);
|
||||
|
||||
// Simulate removal (mirrors store's dismissUiRequest behavior)
|
||||
state = {
|
||||
...state,
|
||||
pendingUiRequests: state.pendingUiRequests.filter((r: any) => r.id !== "req-99"),
|
||||
};
|
||||
assert.equal(state.pendingUiRequests.length, 0);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
// Post a cancel response
|
||||
const response = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "extension_ui_response", id: "req-99", cancelled: true }),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(response.status, 202);
|
||||
await waitForMicrotasks();
|
||||
|
||||
const cancelCmd = harness.commands.find((c) => c.type === "extension_ui_response" && c.cancelled === true);
|
||||
assert.ok(cancelCmd, "cancellation extension_ui_response was sent to the bridge");
|
||||
assert.equal(cancelCmd.id, "req-99");
|
||||
assert.equal(cancelCmd.cancelled, true);
|
||||
|
||||
// Verify store routing: removing from pending queue
|
||||
let state = createMinimalLiveState();
|
||||
state = routeEvent(state, {
|
||||
type: "extension_ui_request",
|
||||
id: "req-99",
|
||||
method: "confirm",
|
||||
title: "Confirm?",
|
||||
message: "Really?",
|
||||
});
|
||||
assert.equal(state.pendingUiRequests.length, 1);
|
||||
|
||||
// Simulate removal (mirrors store's dismissUiRequest behavior)
|
||||
state = {
|
||||
...state,
|
||||
pendingUiRequests: state.pendingUiRequests.filter((r: any) => r.id !== "req-99"),
|
||||
};
|
||||
assert.equal(state.pendingUiRequests.length, 0);
|
||||
});
|
||||
|
||||
test("(e) SSE emits message_update with text delta → streamingAssistantText accumulates", async () => {
|
||||
test("(e) SSE emits message_update with text delta → streamingAssistantText accumulates", async (t) => {
|
||||
let state = createMinimalLiveState();
|
||||
|
||||
state = routeEvent(state, {
|
||||
|
|
@ -625,31 +625,31 @@ test("(e) SSE emits message_update with text delta → streamingAssistantText ac
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({
|
||||
type: "message_update",
|
||||
message: { role: "assistant", content: [] },
|
||||
assistantMessageEvent: { type: "text_delta", delta: "streamed text", contentIndex: 0, partial: {} },
|
||||
});
|
||||
|
||||
const events = await readSseEvents(response, 2); // bridge_status + message_update
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
|
||||
const msgEvent = events.find((e) => e.type === "message_update");
|
||||
assert.ok(msgEvent, "message_update event received via SSE");
|
||||
assert.equal(msgEvent.assistantMessageEvent.type, "text_delta");
|
||||
assert.equal(msgEvent.assistantMessageEvent.delta, "streamed text");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({
|
||||
type: "message_update",
|
||||
message: { role: "assistant", content: [] },
|
||||
assistantMessageEvent: { type: "text_delta", delta: "streamed text", contentIndex: 0, partial: {} },
|
||||
});
|
||||
|
||||
const events = await readSseEvents(response, 2); // bridge_status + message_update
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
|
||||
const msgEvent = events.find((e) => e.type === "message_update");
|
||||
assert.ok(msgEvent, "message_update event received via SSE");
|
||||
assert.equal(msgEvent.assistantMessageEvent.type, "text_delta");
|
||||
assert.equal(msgEvent.assistantMessageEvent.delta, "streamed text");
|
||||
});
|
||||
|
||||
test("(f) agent_end moves streaming text to transcript and resets streaming text", async () => {
|
||||
|
|
@ -813,7 +813,7 @@ test("(g-2) tool_execution_start/end update activeToolExecution", async () => {
|
|||
assert.equal(state.activeToolExecution, null);
|
||||
});
|
||||
|
||||
test("(h) steer and abort commands post the correct RPC command type", async () => {
|
||||
test("(h) steer and abort commands post the correct RPC command type", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-steer", "Steer Session");
|
||||
const harness = createHarness((command, current) => {
|
||||
|
|
@ -853,43 +853,43 @@ test("(h) steer and abort commands post the correct RPC command type", async ()
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
// Send steer command
|
||||
const steerResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "steer", message: "focus on the login flow" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(steerResponse.status, 200);
|
||||
const steerBody = await steerResponse.json() as any;
|
||||
assert.equal(steerBody.success, true);
|
||||
assert.equal(steerBody.command, "steer");
|
||||
|
||||
// Verify steer command reached the bridge with the correct shape
|
||||
const steerCmd = harness.commands.find((c) => c.type === "steer");
|
||||
assert.ok(steerCmd, "steer command was sent to the bridge");
|
||||
assert.equal(steerCmd.message, "focus on the login flow");
|
||||
|
||||
// Send abort command
|
||||
const abortResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "abort" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(abortResponse.status, 200);
|
||||
const abortBody = await abortResponse.json() as any;
|
||||
assert.equal(abortBody.success, true);
|
||||
assert.equal(abortBody.command, "abort");
|
||||
|
||||
const abortCmd = harness.commands.find((c) => c.type === "abort");
|
||||
assert.ok(abortCmd, "abort command was sent to the bridge");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
// Send steer command
|
||||
const steerResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "steer", message: "focus on the login flow" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(steerResponse.status, 200);
|
||||
const steerBody = await steerResponse.json() as any;
|
||||
assert.equal(steerBody.success, true);
|
||||
assert.equal(steerBody.command, "steer");
|
||||
|
||||
// Verify steer command reached the bridge with the correct shape
|
||||
const steerCmd = harness.commands.find((c) => c.type === "steer");
|
||||
assert.ok(steerCmd, "steer command was sent to the bridge");
|
||||
assert.equal(steerCmd.message, "focus on the login flow");
|
||||
|
||||
// Send abort command
|
||||
const abortResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "abort" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(abortResponse.status, 200);
|
||||
const abortBody = await abortResponse.json() as any;
|
||||
assert.equal(abortBody.success, true);
|
||||
assert.equal(abortBody.command, "abort");
|
||||
|
||||
const abortCmd = harness.commands.find((c) => c.type === "abort");
|
||||
assert.ok(abortCmd, "abort command was sent to the bridge");
|
||||
});
|
||||
|
||||
test("(failure-path) UI response errors are visible as lastClientError and pending requests persist on failure", async () => {
|
||||
|
|
@ -920,7 +920,7 @@ test("(failure-path) UI response errors are visible as lastClientError and pendi
|
|||
assert.equal(successState.pendingUiRequests.length, 0, "request removed on success");
|
||||
});
|
||||
|
||||
test("(session-controls) browser session RPCs round-trip through /api/session/command", async () => {
|
||||
test("(session-controls) browser session RPCs round-trip through /api/session/command", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const activeSessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-session", "Session Surface");
|
||||
const nextSessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-next", "Next Session");
|
||||
|
|
@ -1036,85 +1036,85 @@ test("(session-controls) browser session RPCs round-trip through /api/session/co
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
const sessionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_session_stats" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(sessionResponse.status, 200);
|
||||
const sessionBody = await sessionResponse.json() as any;
|
||||
assert.equal(sessionBody.success, true);
|
||||
assert.equal(sessionBody.command, "get_session_stats");
|
||||
assert.equal(sessionBody.data.sessionId, "sess-session");
|
||||
assert.equal(sessionBody.data.tokens.total, 4600);
|
||||
|
||||
const exportResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "export_html", outputPath: exportPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(exportResponse.status, 200);
|
||||
const exportBody = await exportResponse.json() as any;
|
||||
assert.equal(exportBody.success, true);
|
||||
assert.equal(exportBody.data.path, exportPath);
|
||||
|
||||
const switchResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "switch_session", sessionPath: nextSessionPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(switchResponse.status, 200);
|
||||
const switchBody = await switchResponse.json() as any;
|
||||
assert.equal(switchBody.success, true);
|
||||
assert.equal(switchBody.data.cancelled, false);
|
||||
|
||||
const forkMessagesResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_fork_messages" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkMessagesResponse.status, 200);
|
||||
const forkMessagesBody = await forkMessagesResponse.json() as any;
|
||||
assert.equal(forkMessagesBody.success, true);
|
||||
assert.deepEqual(forkMessagesBody.data.messages, forkMessages);
|
||||
|
||||
const forkResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "fork", entryId: "entry-2" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkResponse.status, 200);
|
||||
const forkBody = await forkResponse.json() as any;
|
||||
assert.equal(forkBody.success, true);
|
||||
assert.equal(forkBody.data.cancelled, false);
|
||||
assert.equal(forkBody.data.text, "Fix the slash-command dispatcher");
|
||||
|
||||
const compactResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "compact", customInstructions: "Preserve blockers and current task state" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(compactResponse.status, 200);
|
||||
const compactBody = await compactResponse.json() as any;
|
||||
assert.equal(compactBody.success, true);
|
||||
assert.equal(compactBody.data.summary, "Compacted summary");
|
||||
assert.equal(compactBody.data.tokensBefore, 14200);
|
||||
|
||||
assert.deepEqual(
|
||||
harness.commands.filter((command) => command.type !== "get_state").map((command) => command.type),
|
||||
["get_session_stats", "export_html", "switch_session", "get_fork_messages", "fork", "compact"],
|
||||
"browser session controls should hit the live command route with the expected RPC sequence",
|
||||
);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const sessionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_session_stats" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(sessionResponse.status, 200);
|
||||
const sessionBody = await sessionResponse.json() as any;
|
||||
assert.equal(sessionBody.success, true);
|
||||
assert.equal(sessionBody.command, "get_session_stats");
|
||||
assert.equal(sessionBody.data.sessionId, "sess-session");
|
||||
assert.equal(sessionBody.data.tokens.total, 4600);
|
||||
|
||||
const exportResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "export_html", outputPath: exportPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(exportResponse.status, 200);
|
||||
const exportBody = await exportResponse.json() as any;
|
||||
assert.equal(exportBody.success, true);
|
||||
assert.equal(exportBody.data.path, exportPath);
|
||||
|
||||
const switchResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "switch_session", sessionPath: nextSessionPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(switchResponse.status, 200);
|
||||
const switchBody = await switchResponse.json() as any;
|
||||
assert.equal(switchBody.success, true);
|
||||
assert.equal(switchBody.data.cancelled, false);
|
||||
|
||||
const forkMessagesResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_fork_messages" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkMessagesResponse.status, 200);
|
||||
const forkMessagesBody = await forkMessagesResponse.json() as any;
|
||||
assert.equal(forkMessagesBody.success, true);
|
||||
assert.deepEqual(forkMessagesBody.data.messages, forkMessages);
|
||||
|
||||
const forkResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "fork", entryId: "entry-2" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkResponse.status, 200);
|
||||
const forkBody = await forkResponse.json() as any;
|
||||
assert.equal(forkBody.success, true);
|
||||
assert.equal(forkBody.data.cancelled, false);
|
||||
assert.equal(forkBody.data.text, "Fix the slash-command dispatcher");
|
||||
|
||||
const compactResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "compact", customInstructions: "Preserve blockers and current task state" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(compactResponse.status, 200);
|
||||
const compactBody = await compactResponse.json() as any;
|
||||
assert.equal(compactBody.success, true);
|
||||
assert.equal(compactBody.data.summary, "Compacted summary");
|
||||
assert.equal(compactBody.data.tokensBefore, 14200);
|
||||
|
||||
assert.deepEqual(
|
||||
harness.commands.filter((command) => command.type !== "get_state").map((command) => command.type),
|
||||
["get_session_stats", "export_html", "switch_session", "get_fork_messages", "fork", "compact"],
|
||||
"browser session controls should hit the live command route with the expected RPC sequence",
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -355,7 +355,7 @@ async function readSseEventsUntil(
|
|||
throw new Error("Timed out waiting for the expected SSE contract events");
|
||||
}
|
||||
|
||||
test("/api/session/events exposes explicit live_state_invalidation events for agent and auto recovery boundaries", async () => {
|
||||
test("/api/session/events exposes explicit live_state_invalidation events for agent and auto recovery boundaries", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const sessionPath = createSessionFile(
|
||||
fixture.projectCwd,
|
||||
|
|
@ -381,55 +381,55 @@ test("/api/session/events exposes explicit live_state_invalidation events for ag
|
|||
|
||||
setupBridge(harness, fixture);
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({ type: "agent_end" });
|
||||
harness.emit({ type: "auto_retry_start", attempt: 1, maxAttempts: 3, delayMs: 250, errorMessage: "retry me" });
|
||||
harness.emit({ type: "auto_retry_end", success: false, attempt: 1, finalError: "still failing" });
|
||||
harness.emit({ type: "auto_compaction_start", reason: "threshold" });
|
||||
harness.emit({ type: "auto_compaction_end", result: undefined, aborted: false, willRetry: false });
|
||||
|
||||
const events = await readSseEventsUntil(
|
||||
response,
|
||||
(seen) => seen.filter((event) => event.type === "live_state_invalidation").length >= 5,
|
||||
);
|
||||
const invalidations = events.filter((event) => event.type === "live_state_invalidation");
|
||||
|
||||
assert.deepEqual(
|
||||
invalidations.map((event) => ({
|
||||
reason: event.reason,
|
||||
source: event.source,
|
||||
workspaceIndexCacheInvalidated: event.workspaceIndexCacheInvalidated,
|
||||
})),
|
||||
[
|
||||
{ reason: "agent_end", source: "bridge_event", workspaceIndexCacheInvalidated: true },
|
||||
{ reason: "auto_retry_start", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_retry_end", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_compaction_start", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_compaction_end", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
],
|
||||
"live_state_invalidation reasons/sources should stay inspectable on /api/session/events",
|
||||
);
|
||||
assert.deepEqual(invalidations[0].domains, ["auto", "workspace", "recovery"]);
|
||||
assert.deepEqual(invalidations[1].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[2].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[3].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[4].domains, ["auto", "recovery"]);
|
||||
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const controller = new AbortController();
|
||||
const response = await eventsRoute.GET(
|
||||
new Request("http://localhost/api/session/events", { signal: controller.signal }),
|
||||
);
|
||||
|
||||
harness.emit({ type: "agent_end" });
|
||||
harness.emit({ type: "auto_retry_start", attempt: 1, maxAttempts: 3, delayMs: 250, errorMessage: "retry me" });
|
||||
harness.emit({ type: "auto_retry_end", success: false, attempt: 1, finalError: "still failing" });
|
||||
harness.emit({ type: "auto_compaction_start", reason: "threshold" });
|
||||
harness.emit({ type: "auto_compaction_end", result: undefined, aborted: false, willRetry: false });
|
||||
|
||||
const events = await readSseEventsUntil(
|
||||
response,
|
||||
(seen) => seen.filter((event) => event.type === "live_state_invalidation").length >= 5,
|
||||
);
|
||||
const invalidations = events.filter((event) => event.type === "live_state_invalidation");
|
||||
|
||||
assert.deepEqual(
|
||||
invalidations.map((event) => ({
|
||||
reason: event.reason,
|
||||
source: event.source,
|
||||
workspaceIndexCacheInvalidated: event.workspaceIndexCacheInvalidated,
|
||||
})),
|
||||
[
|
||||
{ reason: "agent_end", source: "bridge_event", workspaceIndexCacheInvalidated: true },
|
||||
{ reason: "auto_retry_start", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_retry_end", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_compaction_start", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
{ reason: "auto_compaction_end", source: "bridge_event", workspaceIndexCacheInvalidated: false },
|
||||
],
|
||||
"live_state_invalidation reasons/sources should stay inspectable on /api/session/events",
|
||||
);
|
||||
assert.deepEqual(invalidations[0].domains, ["auto", "workspace", "recovery"]);
|
||||
assert.deepEqual(invalidations[1].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[2].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[3].domains, ["auto", "recovery"]);
|
||||
assert.deepEqual(invalidations[4].domains, ["auto", "recovery"]);
|
||||
|
||||
controller.abort();
|
||||
await waitForMicrotasks();
|
||||
});
|
||||
|
||||
test("workspace cache only busts on real boundaries and session mutations emit targeted invalidations", async () => {
|
||||
test("workspace cache only busts on real boundaries and session mutations emit targeted invalidations", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
const activeSessionPath = createSessionFile(
|
||||
fixture.projectCwd,
|
||||
|
|
@ -489,99 +489,99 @@ test("workspace cache only busts on real boundaries and session mutations emit t
|
|||
},
|
||||
});
|
||||
|
||||
try {
|
||||
const service = bridge.getProjectBridgeService();
|
||||
await service.ensureStarted();
|
||||
const seenEvents: any[] = [];
|
||||
const unsubscribe = service.subscribe((event) => {
|
||||
seenEvents.push(event);
|
||||
});
|
||||
|
||||
await bridge.collectBootPayload();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 1, "boot snapshot should stay cached before any invalidation boundary fires");
|
||||
|
||||
harness.emit({ type: "agent_end" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "agent_end should invalidate the cached workspace snapshot");
|
||||
|
||||
harness.emit({ type: "auto_retry_start", attempt: 1, maxAttempts: 3, delayMs: 100, errorMessage: "retry me" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "auto_retry_start should not invalidate the workspace snapshot cache");
|
||||
|
||||
harness.emit({ type: "auto_compaction_start", reason: "threshold" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "auto_compaction_start should not invalidate the workspace snapshot cache");
|
||||
|
||||
const switchResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "switch_session", sessionPath: otherSessionPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(switchResponse.status, 200);
|
||||
|
||||
const newSessionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "new_session" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(newSessionResponse.status, 200);
|
||||
|
||||
const forkResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "fork", entryId: "entry-1" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkResponse.status, 200);
|
||||
|
||||
const renameResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: otherSessionPath,
|
||||
name: "Renamed Session",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
const renamePayload = await renameResponse.json() as any;
|
||||
assert.equal(renameResponse.status, 200);
|
||||
assert.equal(renamePayload.success, true);
|
||||
assert.equal(renamePayload.mutation, "session_file");
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
const invalidations = seenEvents.filter((event) => event.type === "live_state_invalidation");
|
||||
const reasons = invalidations.map((event) => event.reason);
|
||||
assert.ok(reasons.includes("agent_end"), "missing agent_end live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("auto_retry_start"), "missing auto_retry_start live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("auto_compaction_start"), "missing auto_compaction_start live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("switch_session"), "missing switch_session live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("new_session"), "missing new_session live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("fork"), "missing fork live_state_invalidation trigger");
|
||||
|
||||
const switchInvalidation = invalidations.find((event) => event.reason === "switch_session");
|
||||
assert.ok(switchInvalidation, "switch_session should emit a targeted freshness event");
|
||||
assert.deepEqual(switchInvalidation.domains, ["resumable_sessions", "recovery"]);
|
||||
assert.equal(switchInvalidation.workspaceIndexCacheInvalidated, false);
|
||||
|
||||
const renameInvalidation = invalidations.find(
|
||||
(event) => event.reason === "set_session_name" && event.source === "session_manage",
|
||||
);
|
||||
assert.ok(renameInvalidation, "inactive rename should emit an inspectable set_session_name invalidation");
|
||||
assert.deepEqual(renameInvalidation.domains, ["resumable_sessions"]);
|
||||
assert.equal(renameInvalidation.workspaceIndexCacheInvalidated, false);
|
||||
|
||||
unsubscribe();
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const service = bridge.getProjectBridgeService();
|
||||
await service.ensureStarted();
|
||||
const seenEvents: any[] = [];
|
||||
const unsubscribe = service.subscribe((event) => {
|
||||
seenEvents.push(event);
|
||||
});
|
||||
|
||||
await bridge.collectBootPayload();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 1, "boot snapshot should stay cached before any invalidation boundary fires");
|
||||
|
||||
harness.emit({ type: "agent_end" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "agent_end should invalidate the cached workspace snapshot");
|
||||
|
||||
harness.emit({ type: "auto_retry_start", attempt: 1, maxAttempts: 3, delayMs: 100, errorMessage: "retry me" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "auto_retry_start should not invalidate the workspace snapshot cache");
|
||||
|
||||
harness.emit({ type: "auto_compaction_start", reason: "threshold" });
|
||||
await waitForMicrotasks();
|
||||
await bridge.collectBootPayload();
|
||||
assert.equal(workspaceIndexCalls, 2, "auto_compaction_start should not invalidate the workspace snapshot cache");
|
||||
|
||||
const switchResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "switch_session", sessionPath: otherSessionPath }),
|
||||
}),
|
||||
);
|
||||
assert.equal(switchResponse.status, 200);
|
||||
|
||||
const newSessionResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "new_session" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(newSessionResponse.status, 200);
|
||||
|
||||
const forkResponse = await commandRoute.POST(
|
||||
new Request("http://localhost/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "fork", entryId: "entry-1" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(forkResponse.status, 200);
|
||||
|
||||
const renameResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: otherSessionPath,
|
||||
name: "Renamed Session",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
const renamePayload = await renameResponse.json() as any;
|
||||
assert.equal(renameResponse.status, 200);
|
||||
assert.equal(renamePayload.success, true);
|
||||
assert.equal(renamePayload.mutation, "session_file");
|
||||
|
||||
await waitForMicrotasks();
|
||||
|
||||
const invalidations = seenEvents.filter((event) => event.type === "live_state_invalidation");
|
||||
const reasons = invalidations.map((event) => event.reason);
|
||||
assert.ok(reasons.includes("agent_end"), "missing agent_end live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("auto_retry_start"), "missing auto_retry_start live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("auto_compaction_start"), "missing auto_compaction_start live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("switch_session"), "missing switch_session live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("new_session"), "missing new_session live_state_invalidation trigger");
|
||||
assert.ok(reasons.includes("fork"), "missing fork live_state_invalidation trigger");
|
||||
|
||||
const switchInvalidation = invalidations.find((event) => event.reason === "switch_session");
|
||||
assert.ok(switchInvalidation, "switch_session should emit a targeted freshness event");
|
||||
assert.deepEqual(switchInvalidation.domains, ["resumable_sessions", "recovery"]);
|
||||
assert.equal(switchInvalidation.workspaceIndexCacheInvalidated, false);
|
||||
|
||||
const renameInvalidation = invalidations.find(
|
||||
(event) => event.reason === "set_session_name" && event.source === "session_manage",
|
||||
);
|
||||
assert.ok(renameInvalidation, "inactive rename should emit an inspectable set_session_name invalidation");
|
||||
assert.deepEqual(renameInvalidation.domains, ["resumable_sessions"]);
|
||||
assert.equal(renameInvalidation.workspaceIndexCacheInvalidated, false);
|
||||
|
||||
unsubscribe();
|
||||
});
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -65,7 +65,7 @@ test('parseCliArgs does not set network flags when not provided', () => {
|
|||
|
||||
// ─── launchWebMode env forwarding ────────────────────────────────────
|
||||
|
||||
test('launchWebMode forwards custom host, port, and allowed origins to subprocess env', async () => {
|
||||
test('launchWebMode forwards custom host, port, and allowed origins to subprocess env', async (t) => {
|
||||
const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-net-'))
|
||||
const standaloneRoot = join(tmp, 'dist', 'web', 'standalone')
|
||||
const serverPath = join(standaloneRoot, 'server.js')
|
||||
|
|
@ -74,47 +74,45 @@ test('launchWebMode forwards custom host, port, and allowed origins to subproces
|
|||
|
||||
let spawnEnv: Record<string, string> | undefined
|
||||
|
||||
try {
|
||||
const status = await webMode.launchWebMode(
|
||||
{
|
||||
cwd: '/tmp/project',
|
||||
projectSessionsDir: '/tmp/.gsd/sessions',
|
||||
agentDir: '/tmp/.gsd/agent',
|
||||
packageRoot: tmp,
|
||||
host: '0.0.0.0',
|
||||
port: 8080,
|
||||
allowedOrigins: ['http://192.168.1.10:8080', 'http://tailscale-host:8080'],
|
||||
},
|
||||
{
|
||||
initResources: () => {},
|
||||
spawn: (_command, _args, options) => {
|
||||
spawnEnv = (options as { env: Record<string, string> }).env
|
||||
return { pid: 99999, once: () => undefined, unref: () => {} } as any
|
||||
},
|
||||
waitForBootReady: async () => undefined,
|
||||
openBrowser: () => {},
|
||||
stderr: { write: () => true },
|
||||
},
|
||||
)
|
||||
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });
|
||||
|
||||
assert.equal(status.ok, true)
|
||||
if (!status.ok) throw new Error('expected success')
|
||||
assert.equal(status.host, '0.0.0.0')
|
||||
assert.equal(status.port, 8080)
|
||||
assert.equal(status.url, 'http://0.0.0.0:8080')
|
||||
const status = await webMode.launchWebMode(
|
||||
{
|
||||
cwd: '/tmp/project',
|
||||
projectSessionsDir: '/tmp/.gsd/sessions',
|
||||
agentDir: '/tmp/.gsd/agent',
|
||||
packageRoot: tmp,
|
||||
host: '0.0.0.0',
|
||||
port: 8080,
|
||||
allowedOrigins: ['http://192.168.1.10:8080', 'http://tailscale-host:8080'],
|
||||
},
|
||||
{
|
||||
initResources: () => {},
|
||||
spawn: (_command, _args, options) => {
|
||||
spawnEnv = (options as { env: Record<string, string> }).env
|
||||
return { pid: 99999, once: () => undefined, unref: () => {} } as any
|
||||
},
|
||||
waitForBootReady: async () => undefined,
|
||||
openBrowser: () => {},
|
||||
stderr: { write: () => true },
|
||||
},
|
||||
)
|
||||
|
||||
assert.ok(spawnEnv)
|
||||
assert.equal(spawnEnv!.HOSTNAME, '0.0.0.0')
|
||||
assert.equal(spawnEnv!.PORT, '8080')
|
||||
assert.equal(spawnEnv!.GSD_WEB_HOST, '0.0.0.0')
|
||||
assert.equal(spawnEnv!.GSD_WEB_PORT, '8080')
|
||||
assert.equal(spawnEnv!.GSD_WEB_ALLOWED_ORIGINS, 'http://192.168.1.10:8080,http://tailscale-host:8080')
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true })
|
||||
}
|
||||
assert.equal(status.ok, true)
|
||||
if (!status.ok) throw new Error('expected success')
|
||||
assert.equal(status.host, '0.0.0.0')
|
||||
assert.equal(status.port, 8080)
|
||||
assert.equal(status.url, 'http://0.0.0.0:8080')
|
||||
|
||||
assert.ok(spawnEnv)
|
||||
assert.equal(spawnEnv!.HOSTNAME, '0.0.0.0')
|
||||
assert.equal(spawnEnv!.PORT, '8080')
|
||||
assert.equal(spawnEnv!.GSD_WEB_HOST, '0.0.0.0')
|
||||
assert.equal(spawnEnv!.GSD_WEB_PORT, '8080')
|
||||
assert.equal(spawnEnv!.GSD_WEB_ALLOWED_ORIGINS, 'http://192.168.1.10:8080,http://tailscale-host:8080')
|
||||
})
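The hunk above collapses the try/finally wrapper into a single t.after() hook registered right after mkdtempSync creates the directory. A minimal sketch of that pattern on its own, outside this diff, using only node:test and node:fs (the 'gsd-example-' prefix and test name are illustrative, not taken from the suite):

// Minimal sketch of the cleanup pattern this commit applies throughout (node:test).
import { test } from 'node:test'
import assert from 'node:assert/strict'
import { mkdtempSync, rmSync } from 'node:fs'
import { tmpdir } from 'node:os'
import { join } from 'node:path'

test('removes its temp dir even if a later assertion throws', async (t) => {
  const tmp = mkdtempSync(join(tmpdir(), 'gsd-example-'))
  // Register cleanup immediately: node:test runs t.after() hooks whether the
  // body passes, fails, or throws, so no try/finally wrapper is needed.
  t.after(() => { rmSync(tmp, { recursive: true, force: true }) })

  assert.ok(tmp.includes('gsd-example-'))
})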
|
||||
|
||||
test('launchWebMode omits GSD_WEB_ALLOWED_ORIGINS when none provided', async () => {
|
||||
test('launchWebMode omits GSD_WEB_ALLOWED_ORIGINS when none provided', async (t) => {
|
||||
const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-no-origins-'))
|
||||
const standaloneRoot = join(tmp, 'dist', 'web', 'standalone')
|
||||
const serverPath = join(standaloneRoot, 'server.js')
|
||||
|
|
@ -123,79 +121,75 @@ test('launchWebMode omits GSD_WEB_ALLOWED_ORIGINS when none provided', async ()
|
|||
|
||||
let spawnEnv: Record<string, string> | undefined
|
||||
|
||||
try {
|
||||
await webMode.launchWebMode(
|
||||
{
|
||||
cwd: '/tmp/project',
|
||||
projectSessionsDir: '/tmp/.gsd/sessions',
|
||||
agentDir: '/tmp/.gsd/agent',
|
||||
packageRoot: tmp,
|
||||
},
|
||||
{
|
||||
initResources: () => {},
|
||||
resolvePort: async () => 45000,
|
||||
env: { CLEAN_ENV: '1' },
|
||||
spawn: (_command, _args, options) => {
|
||||
spawnEnv = (options as { env: Record<string, string> }).env
|
||||
return { pid: 99999, once: () => undefined, unref: () => {} } as any
|
||||
},
|
||||
waitForBootReady: async () => undefined,
|
||||
openBrowser: () => {},
|
||||
stderr: { write: () => true },
|
||||
},
|
||||
)
|
||||
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });
|
||||
|
||||
assert.ok(spawnEnv)
|
||||
assert.equal(spawnEnv!.GSD_WEB_ALLOWED_ORIGINS, undefined)
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true })
|
||||
}
|
||||
await webMode.launchWebMode(
|
||||
{
|
||||
cwd: '/tmp/project',
|
||||
projectSessionsDir: '/tmp/.gsd/sessions',
|
||||
agentDir: '/tmp/.gsd/agent',
|
||||
packageRoot: tmp,
|
||||
},
|
||||
{
|
||||
initResources: () => {},
|
||||
resolvePort: async () => 45000,
|
||||
env: { CLEAN_ENV: '1' },
|
||||
spawn: (_command, _args, options) => {
|
||||
spawnEnv = (options as { env: Record<string, string> }).env
|
||||
return { pid: 99999, once: () => undefined, unref: () => {} } as any
|
||||
},
|
||||
waitForBootReady: async () => undefined,
|
||||
openBrowser: () => {},
|
||||
stderr: { write: () => true },
|
||||
},
|
||||
)
|
||||
|
||||
assert.ok(spawnEnv)
|
||||
assert.equal(spawnEnv!.GSD_WEB_ALLOWED_ORIGINS, undefined)
|
||||
})
|
||||
|
||||
// ─── runWebCliBranch end-to-end forwarding ───────────────────────────
|
||||
|
||||
test('runWebCliBranch forwards --host, --port, --allowed-origins to launchWebMode', async () => {
|
||||
test('runWebCliBranch forwards --host, --port, --allowed-origins to launchWebMode', async (t) => {
|
||||
const tmp = mkdtempSync(join(tmpdir(), 'gsd-web-branch-flags-'))
|
||||
const projectDir = join(tmp, 'project')
|
||||
mkdirSync(projectDir, { recursive: true })
|
||||
|
||||
let receivedOptions: Record<string, unknown> | undefined
|
||||
|
||||
try {
|
||||
const flags = cliWeb.parseCliArgs([
|
||||
'node', 'dist/loader.js', '--web', projectDir,
|
||||
'--host', '0.0.0.0',
|
||||
'--port', '9000',
|
||||
'--allowed-origins', 'http://my-host:9000',
|
||||
])
|
||||
t.after(() => { rmSync(tmp, { recursive: true, force: true }) });
|
||||
|
||||
const result = await cliWeb.runWebCliBranch(flags, {
|
||||
runWebMode: async (options) => {
|
||||
receivedOptions = options as unknown as Record<string, unknown>
|
||||
return {
|
||||
mode: 'web' as const,
|
||||
ok: true as const,
|
||||
cwd: options.cwd,
|
||||
projectSessionsDir: options.projectSessionsDir,
|
||||
host: '0.0.0.0',
|
||||
port: 9000,
|
||||
url: 'http://0.0.0.0:9000',
|
||||
hostKind: 'source-dev' as const,
|
||||
hostPath: '/tmp/fake-web/package.json',
|
||||
hostRoot: '/tmp/fake-web',
|
||||
}
|
||||
},
|
||||
stderr: { write: () => true },
|
||||
})
|
||||
const flags = cliWeb.parseCliArgs([
|
||||
'node', 'dist/loader.js', '--web', projectDir,
|
||||
'--host', '0.0.0.0',
|
||||
'--port', '9000',
|
||||
'--allowed-origins', 'http://my-host:9000',
|
||||
])
|
||||
|
||||
assert.equal(result.handled, true)
|
||||
if (!result.handled) throw new Error('expected handled')
|
||||
assert.equal(result.exitCode, 0)
|
||||
assert.ok(receivedOptions)
|
||||
assert.equal(receivedOptions!.host, '0.0.0.0')
|
||||
assert.equal(receivedOptions!.port, 9000)
|
||||
assert.deepEqual(receivedOptions!.allowedOrigins, ['http://my-host:9000'])
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true })
|
||||
}
|
||||
const result = await cliWeb.runWebCliBranch(flags, {
|
||||
runWebMode: async (options) => {
|
||||
receivedOptions = options as unknown as Record<string, unknown>
|
||||
return {
|
||||
mode: 'web' as const,
|
||||
ok: true as const,
|
||||
cwd: options.cwd,
|
||||
projectSessionsDir: options.projectSessionsDir,
|
||||
host: '0.0.0.0',
|
||||
port: 9000,
|
||||
url: 'http://0.0.0.0:9000',
|
||||
hostKind: 'source-dev' as const,
|
||||
hostPath: '/tmp/fake-web/package.json',
|
||||
hostRoot: '/tmp/fake-web',
|
||||
}
|
||||
},
|
||||
stderr: { write: () => true },
|
||||
})
|
||||
|
||||
assert.equal(result.handled, true)
|
||||
if (!result.handled) throw new Error('expected handled')
|
||||
assert.equal(result.exitCode, 0)
|
||||
assert.ok(receivedOptions)
|
||||
assert.equal(receivedOptions!.host, '0.0.0.0')
|
||||
assert.equal(receivedOptions!.port, 9000)
|
||||
assert.deepEqual(receivedOptions!.allowedOrigins, ['http://my-host:9000'])
|
||||
})
@ -230,7 +230,7 @@ function createHarness(sessionId: string) {
|
|||
// Tests — multi-project bridge coexistence
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
test("multi-project: getProjectBridgeServiceForCwd returns distinct instances for different project paths", async () => {
|
||||
test("multi-project: getProjectBridgeServiceForCwd returns distinct instances for different project paths", async (t) => {
|
||||
const fixtureA = makeWorkspaceFixture("A");
|
||||
const fixtureB = makeWorkspaceFixture("B");
|
||||
|
||||
|
|
@ -247,23 +247,23 @@ test("multi-project: getProjectBridgeServiceForCwd returns distinct instances fo
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(bridgeA, bridgeB, "bridges for different paths must be distinct instances");
|
||||
|
||||
const snapA = bridgeA.getSnapshot();
|
||||
const snapB = bridgeB.getSnapshot();
|
||||
assert.equal(snapA.projectCwd, fixtureA.projectCwd);
|
||||
assert.equal(snapB.projectCwd, fixtureB.projectCwd);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixtureA.cleanup();
|
||||
fixtureB.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(bridgeA, bridgeB, "bridges for different paths must be distinct instances");
|
||||
|
||||
const snapA = bridgeA.getSnapshot();
|
||||
const snapB = bridgeB.getSnapshot();
|
||||
assert.equal(snapA.projectCwd, fixtureA.projectCwd);
|
||||
assert.equal(snapB.projectCwd, fixtureB.projectCwd);
|
||||
});
|
||||
|
||||
test("multi-project: getProjectBridgeServiceForCwd returns same instance for same path", async () => {
|
||||
test("multi-project: getProjectBridgeServiceForCwd returns same instance for same path", async (t) => {
|
||||
const fixtureA = makeWorkspaceFixture("idempotent");
|
||||
|
||||
bridge.configureBridgeServiceForTests({
|
||||
|
|
@ -279,17 +279,17 @@ test("multi-project: getProjectBridgeServiceForCwd returns same instance for sam
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
const first = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const second = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
assert.strictEqual(first, second, "same path must return the same instance");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixtureA.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const first = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const second = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
assert.strictEqual(first, second, "same path must return the same instance");
|
||||
});
|
||||
|
||||
test("multi-project: each bridge receives commands independently", async () => {
|
||||
test("multi-project: each bridge receives commands independently", async (t) => {
|
||||
const fixtureA = makeWorkspaceFixture("cmd-A");
|
||||
const fixtureB = makeWorkspaceFixture("cmd-B");
|
||||
const sessionPathA = createSessionFile(fixtureA.projectCwd, fixtureA.sessionsDir, "sess-A", "Session A");
|
||||
|
|
@ -320,43 +320,43 @@ test("multi-project: each bridge receives commands independently", async () => {
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
|
||||
// Start both bridges
|
||||
await bridgeA.ensureStarted();
|
||||
await bridgeB.ensureStarted();
|
||||
|
||||
// Send get_state to bridge A
|
||||
const responseA = await bridgeA.sendInput({ type: "get_state" } as any);
|
||||
assert.equal(responseA?.success, true);
|
||||
assert.equal((responseA as any).data.sessionId, "sess-A");
|
||||
|
||||
// Send get_state to bridge B
|
||||
const responseB = await bridgeB.sendInput({ type: "get_state" } as any);
|
||||
assert.equal(responseB?.success, true);
|
||||
assert.equal((responseB as any).data.sessionId, "sess-B");
|
||||
|
||||
// Each harness only got its own commands
|
||||
assert.ok(harnessA.commands.length >= 1, "harness A received commands");
|
||||
assert.ok(harnessB.commands.length >= 1, "harness B received commands");
|
||||
assert.ok(
|
||||
harnessA.commands.every((c: any) => c.type === "get_state"),
|
||||
"harness A only got get_state commands",
|
||||
);
|
||||
assert.ok(
|
||||
harnessB.commands.every((c: any) => c.type === "get_state"),
|
||||
"harness B only got get_state commands",
|
||||
);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixtureA.cleanup();
|
||||
fixtureB.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
|
||||
// Start both bridges
|
||||
await bridgeA.ensureStarted();
|
||||
await bridgeB.ensureStarted();
|
||||
|
||||
// Send get_state to bridge A
|
||||
const responseA = await bridgeA.sendInput({ type: "get_state" } as any);
|
||||
assert.equal(responseA?.success, true);
|
||||
assert.equal((responseA as any).data.sessionId, "sess-A");
|
||||
|
||||
// Send get_state to bridge B
|
||||
const responseB = await bridgeB.sendInput({ type: "get_state" } as any);
|
||||
assert.equal(responseB?.success, true);
|
||||
assert.equal((responseB as any).data.sessionId, "sess-B");
|
||||
|
||||
// Each harness only got its own commands
|
||||
assert.ok(harnessA.commands.length >= 1, "harness A received commands");
|
||||
assert.ok(harnessB.commands.length >= 1, "harness B received commands");
|
||||
assert.ok(
|
||||
harnessA.commands.every((c: any) => c.type === "get_state"),
|
||||
"harness A only got get_state commands",
|
||||
);
|
||||
assert.ok(
|
||||
harnessB.commands.every((c: any) => c.type === "get_state"),
|
||||
"harness B only got get_state commands",
|
||||
);
|
||||
});
|
||||
|
||||
test("multi-project: SSE subscribers are isolated per bridge", async () => {
|
||||
test("multi-project: SSE subscribers are isolated per bridge", async (t) => {
|
||||
const fixtureA = makeWorkspaceFixture("sse-A");
|
||||
const fixtureB = makeWorkspaceFixture("sse-B");
|
||||
|
||||
|
|
@ -375,52 +375,52 @@ test("multi-project: SSE subscribers are isolated per bridge", async () => {
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
|
||||
const eventsA: any[] = [];
|
||||
const eventsB: any[] = [];
|
||||
|
||||
const unsubA = bridgeA.subscribe((event) => eventsA.push(event));
|
||||
const unsubB = bridgeB.subscribe((event) => eventsB.push(event));
|
||||
|
||||
// Subscribe fires an initial bridge_status event for each
|
||||
const initialA = eventsA.length;
|
||||
const initialB = eventsB.length;
|
||||
|
||||
// Start bridge A so it has a child process
|
||||
await bridgeA.ensureStarted();
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Filter to only non-bridge_status events that we emit manually
|
||||
const agentEventsA: any[] = [];
|
||||
const agentEventsB: any[] = [];
|
||||
|
||||
const unsubA2 = bridgeA.subscribe((event) => {
|
||||
if (event.type !== "bridge_status") agentEventsA.push(event);
|
||||
});
|
||||
const unsubB2 = bridgeB.subscribe((event) => {
|
||||
if (event.type !== "bridge_status") agentEventsB.push(event);
|
||||
});
|
||||
|
||||
// Emit an agent event on bridge A's child process
|
||||
harnessA.emit({ type: "agent_start" });
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Bridge A's subscriber should see it; bridge B's should not
|
||||
assert.ok(agentEventsA.length > 0, "bridge A subscriber should see agent_start");
|
||||
assert.equal(agentEventsB.length, 0, "bridge B subscriber should NOT see events from bridge A");
|
||||
|
||||
unsubA();
|
||||
unsubB();
|
||||
unsubA2();
|
||||
unsubB2();
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixtureA.cleanup();
|
||||
fixtureB.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bridgeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const bridgeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
|
||||
const eventsA: any[] = [];
|
||||
const eventsB: any[] = [];
|
||||
|
||||
const unsubA = bridgeA.subscribe((event) => eventsA.push(event));
|
||||
const unsubB = bridgeB.subscribe((event) => eventsB.push(event));
|
||||
|
||||
// Subscribe fires an initial bridge_status event for each
|
||||
const initialA = eventsA.length;
|
||||
const initialB = eventsB.length;
|
||||
|
||||
// Start bridge A so it has a child process
|
||||
await bridgeA.ensureStarted();
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Filter to only non-bridge_status events that we emit manually
|
||||
const agentEventsA: any[] = [];
|
||||
const agentEventsB: any[] = [];
|
||||
|
||||
const unsubA2 = bridgeA.subscribe((event) => {
|
||||
if (event.type !== "bridge_status") agentEventsA.push(event);
|
||||
});
|
||||
const unsubB2 = bridgeB.subscribe((event) => {
|
||||
if (event.type !== "bridge_status") agentEventsB.push(event);
|
||||
});
|
||||
|
||||
// Emit an agent event on bridge A's child process
|
||||
harnessA.emit({ type: "agent_start" });
|
||||
await waitForMicrotasks();
|
||||
|
||||
// Bridge A's subscriber should see it; bridge B's should not
|
||||
assert.ok(agentEventsA.length > 0, "bridge A subscriber should see agent_start");
|
||||
assert.equal(agentEventsB.length, 0, "bridge B subscriber should NOT see events from bridge A");
|
||||
|
||||
unsubA();
|
||||
unsubB();
|
||||
unsubA2();
|
||||
unsubB2();
|
||||
});
|
||||
|
||||
test("multi-project: resolveProjectCwd reads ?project= from request URL", () => {
|
||||
|
|
@ -430,7 +430,7 @@ test("multi-project: resolveProjectCwd reads ?project= from request URL", () =>
|
|||
assert.equal(result, "/tmp/my-project");
|
||||
});
|
||||
|
||||
test("multi-project: resolveProjectCwd falls back to GSD_WEB_PROJECT_CWD when no ?project= present", () => {
|
||||
test("multi-project: resolveProjectCwd falls back to GSD_WEB_PROJECT_CWD when no ?project= present", (t) => {
|
||||
bridge.configureBridgeServiceForTests({
|
||||
env: {
|
||||
...process.env,
|
||||
|
|
@ -443,17 +443,15 @@ test("multi-project: resolveProjectCwd falls back to GSD_WEB_PROJECT_CWD when no
getOnboardingNeeded: () => false,
});

try {
const result = bridge.resolveProjectCwd(
new Request("http://localhost/api/boot"),
);
assert.equal(result, "/fallback/path");
} finally {
bridge.configureBridgeServiceForTests(null);
}
t.after(() => { bridge.configureBridgeServiceForTests(null); });

const result = bridge.resolveProjectCwd(
new Request("http://localhost/api/boot"),
);
assert.equal(result, "/fallback/path");
});
|
||||
|
||||
test("multi-project: getProjectBridgeService backward compat shim works", async () => {
|
||||
test("multi-project: getProjectBridgeService backward compat shim works", async (t) => {
|
||||
const fixture = makeWorkspaceFixture("compat");
|
||||
const harness = createHarness("sess-compat");
|
||||
|
||||
|
|
@ -470,23 +468,23 @@ test("multi-project: getProjectBridgeService backward compat shim works", async
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
const service = bridge.getProjectBridgeService();
|
||||
assert.ok(service, "getProjectBridgeService() should return a BridgeService");
|
||||
const snapshot = service.getSnapshot();
|
||||
assert.equal(snapshot.projectCwd, fixture.projectCwd, "backward compat shim should use env-resolved projectCwd");
|
||||
assert.equal(snapshot.phase, "idle");
|
||||
|
||||
// Same instance as getProjectBridgeServiceForCwd with the same path
|
||||
const directService = bridge.getProjectBridgeServiceForCwd(fixture.projectCwd);
|
||||
assert.strictEqual(service, directService, "backward compat shim should return same instance as direct lookup");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const service = bridge.getProjectBridgeService();
|
||||
assert.ok(service, "getProjectBridgeService() should return a BridgeService");
|
||||
const snapshot = service.getSnapshot();
|
||||
assert.equal(snapshot.projectCwd, fixture.projectCwd, "backward compat shim should use env-resolved projectCwd");
|
||||
assert.equal(snapshot.phase, "idle");
|
||||
|
||||
// Same instance as getProjectBridgeServiceForCwd with the same path
|
||||
const directService = bridge.getProjectBridgeServiceForCwd(fixture.projectCwd);
|
||||
assert.strictEqual(service, directService, "backward compat shim should return same instance as direct lookup");
|
||||
});
|
||||
|
||||
test("multi-project: resetBridgeServiceForTests clears all registry entries", async () => {
|
||||
test("multi-project: resetBridgeServiceForTests clears all registry entries", async (t) => {
|
||||
const fixtureA = makeWorkspaceFixture("reset-A");
|
||||
const fixtureB = makeWorkspaceFixture("reset-B");
|
||||
|
||||
|
|
@ -503,38 +501,38 @@ test("multi-project: resetBridgeServiceForTests clears all registry entries", as
|
|||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
try {
|
||||
// Create two bridge instances
|
||||
const beforeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const beforeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(beforeA, beforeB);
|
||||
|
||||
// Reset clears the registry
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
|
||||
// Re-configure after reset (reset clears overrides too)
|
||||
bridge.configureBridgeServiceForTests({
|
||||
env: {
|
||||
...process.env,
|
||||
GSD_WEB_PROJECT_CWD: fixtureA.projectCwd,
|
||||
GSD_WEB_PROJECT_SESSIONS_DIR: fixtureA.sessionsDir,
|
||||
GSD_WEB_PACKAGE_ROOT: repoRoot,
|
||||
},
|
||||
spawn: createHarness("unused").spawn,
|
||||
indexWorkspace: async () => fakeWorkspaceIndex(),
|
||||
getAutoDashboardData: () => fakeAutoDashboardData(),
|
||||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
// Should get new instances
|
||||
const afterA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const afterB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(afterA, beforeA, "reset must create fresh instances for path A");
|
||||
assert.notStrictEqual(afterB, beforeB, "reset must create fresh instances for path B");
|
||||
assert.notStrictEqual(afterA, afterB, "new instances should still be distinct");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
fixtureA.cleanup();
|
||||
fixtureB.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
// Create two bridge instances
|
||||
const beforeA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const beforeB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(beforeA, beforeB);
|
||||
|
||||
// Reset clears the registry
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
|
||||
// Re-configure after reset (reset clears overrides too)
|
||||
bridge.configureBridgeServiceForTests({
|
||||
env: {
|
||||
...process.env,
|
||||
GSD_WEB_PROJECT_CWD: fixtureA.projectCwd,
|
||||
GSD_WEB_PROJECT_SESSIONS_DIR: fixtureA.sessionsDir,
|
||||
GSD_WEB_PACKAGE_ROOT: repoRoot,
|
||||
},
|
||||
spawn: createHarness("unused").spawn,
|
||||
indexWorkspace: async () => fakeWorkspaceIndex(),
|
||||
getAutoDashboardData: () => fakeAutoDashboardData(),
|
||||
getOnboardingNeeded: () => false,
|
||||
});
|
||||
|
||||
// Should get new instances
|
||||
const afterA = bridge.getProjectBridgeServiceForCwd(fixtureA.projectCwd);
|
||||
const afterB = bridge.getProjectBridgeServiceForCwd(fixtureB.projectCwd);
|
||||
assert.notStrictEqual(afterA, beforeA, "reset must create fresh instances for path A");
|
||||
assert.notStrictEqual(afterB, beforeB, "reset must create fresh instances for path B");
|
||||
assert.notStrictEqual(afterA, afterB, "new instances should still be distinct");
|
||||
});
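The multi-project tests above hand t.after() a single async callback that resets the shared bridge registry and then removes every fixture. A hedged sketch of that shape, with a hypothetical makeFixture helper standing in for makeWorkspaceFixture and the reset call; node:test awaits async hooks before starting the next test:

import { test } from "node:test";
import assert from "node:assert/strict";
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

// Hypothetical stand-in for the per-test workspace fixtures used above.
function makeFixture(label: string) {
  const dir = mkdtempSync(join(tmpdir(), `fixture-${label}-`));
  return { dir, cleanup: () => rmSync(dir, { recursive: true, force: true }) };
}

test("one async hook tears down every fixture", async (t) => {
  const fixtureA = makeFixture("A");
  const fixtureB = makeFixture("B");
  t.after(async () => {
    // The runner awaits this hook, so both directories are gone
    // before the next test begins.
    fixtureA.cleanup();
    fixtureB.cleanup();
  });
  assert.notEqual(fixtureA.dir, fixtureB.dir);
});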
@ -304,60 +304,60 @@ function configureBridgeFixture(fixture: { projectCwd: string; sessionsDir: stri
|
|||
return harness;
|
||||
}
|
||||
|
||||
test("boot and onboarding routes expose locked required state plus explicitly skippable optional setup when auth is missing", async () => {
|
||||
test("boot and onboarding routes expose locked required state plus explicitly skippable optional setup when auth is missing", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
configureBridgeFixture(fixture, "sess-missing-auth");
|
||||
onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey });
|
||||
|
||||
try {
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
|
||||
assert.equal(bootPayload.onboardingNeeded, true);
|
||||
assert.equal(bootPayload.onboarding.status, "blocked");
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.equal(bootPayload.onboarding.required.satisfied, false);
|
||||
assert.equal(bootPayload.onboarding.required.satisfiedBy, null);
|
||||
assert.equal(bootPayload.onboarding.optional.skippable, true);
|
||||
assert.ok(bootPayload.onboarding.optional.sections.every((section: any) => section.blocking === false));
|
||||
|
||||
const providerIds = bootPayload.onboarding.required.providers.map((provider: any) => provider.id);
|
||||
assert.deepEqual(providerIds, [
|
||||
"anthropic",
|
||||
"openai",
|
||||
"github-copilot",
|
||||
"openai-codex",
|
||||
"google-gemini-cli",
|
||||
"google-antigravity",
|
||||
"google",
|
||||
"groq",
|
||||
"xai",
|
||||
"openrouter",
|
||||
"mistral",
|
||||
]);
|
||||
const anthropicProvider = bootPayload.onboarding.required.providers.find((provider: any) => provider.id === "anthropic");
|
||||
assert.equal(anthropicProvider.supports.apiKey, true);
|
||||
assert.equal(anthropicProvider.supports.oauthAvailable, true);
|
||||
|
||||
const onboardingResponse = await onboardingRoute.GET(projectRequest(fixture.projectCwd, "/api/onboarding"));
|
||||
assert.equal(onboardingResponse.status, 200);
|
||||
const onboardingPayload = (await onboardingResponse.json()) as any;
|
||||
assert.equal(onboardingPayload.onboarding.locked, true);
|
||||
assert.equal(onboardingPayload.onboarding.optional.skippable, true);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
|
||||
assert.equal(bootPayload.onboardingNeeded, true);
|
||||
assert.equal(bootPayload.onboarding.status, "blocked");
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.equal(bootPayload.onboarding.required.satisfied, false);
|
||||
assert.equal(bootPayload.onboarding.required.satisfiedBy, null);
|
||||
assert.equal(bootPayload.onboarding.optional.skippable, true);
|
||||
assert.ok(bootPayload.onboarding.optional.sections.every((section: any) => section.blocking === false));
|
||||
|
||||
const providerIds = bootPayload.onboarding.required.providers.map((provider: any) => provider.id);
|
||||
assert.deepEqual(providerIds, [
|
||||
"anthropic",
|
||||
"openai",
|
||||
"github-copilot",
|
||||
"openai-codex",
|
||||
"google-gemini-cli",
|
||||
"google-antigravity",
|
||||
"google",
|
||||
"groq",
|
||||
"xai",
|
||||
"openrouter",
|
||||
"mistral",
|
||||
]);
|
||||
const anthropicProvider = bootPayload.onboarding.required.providers.find((provider: any) => provider.id === "anthropic");
|
||||
assert.equal(anthropicProvider.supports.apiKey, true);
|
||||
assert.equal(anthropicProvider.supports.oauthAvailable, true);
|
||||
|
||||
const onboardingResponse = await onboardingRoute.GET(projectRequest(fixture.projectCwd, "/api/onboarding"));
|
||||
assert.equal(onboardingResponse.status, 200);
|
||||
const onboardingPayload = (await onboardingResponse.json()) as any;
|
||||
assert.equal(onboardingPayload.onboarding.locked, true);
|
||||
assert.equal(onboardingPayload.onboarding.optional.skippable, true);
|
||||
});
|
||||
|
||||
test("runtime env-backed auth unlocks boot onboarding state and reports the environment source", async () => {
|
||||
test("runtime env-backed auth unlocks boot onboarding state and reports the environment source", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
|
|
@ -369,36 +369,36 @@ test("runtime env-backed auth unlocks boot onboarding state and reports the envi
|
|||
getEnvApiKey: (provider: string) => (provider === "github-copilot" ? process.env.GITHUB_TOKEN : undefined),
|
||||
});
|
||||
|
||||
try {
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
|
||||
assert.equal(bootPayload.onboardingNeeded, false);
|
||||
assert.equal(bootPayload.onboarding.locked, false);
|
||||
assert.equal(bootPayload.onboarding.lockReason, null);
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.deepEqual(bootPayload.onboarding.required.satisfiedBy, {
|
||||
providerId: "github-copilot",
|
||||
source: "environment",
|
||||
});
|
||||
const copilotProvider = bootPayload.onboarding.required.providers.find((provider: any) => provider.id === "github-copilot");
|
||||
assert.equal(copilotProvider.configured, true);
|
||||
assert.equal(copilotProvider.configuredVia, "environment");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
if (previousGithubToken === undefined) {
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
} else {
|
||||
process.env.GITHUB_TOKEN = previousGithubToken;
|
||||
process.env.GITHUB_TOKEN = previousGithubToken;
|
||||
}
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
|
||||
assert.equal(bootPayload.onboardingNeeded, false);
|
||||
assert.equal(bootPayload.onboarding.locked, false);
|
||||
assert.equal(bootPayload.onboarding.lockReason, null);
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.deepEqual(bootPayload.onboarding.required.satisfiedBy, {
|
||||
providerId: "github-copilot",
|
||||
source: "environment",
|
||||
});
|
||||
const copilotProvider = bootPayload.onboarding.required.providers.find((provider: any) => provider.id === "github-copilot");
|
||||
assert.equal(copilotProvider.configured, true);
|
||||
assert.equal(copilotProvider.configuredVia, "environment");
|
||||
});
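The GITHUB_TOKEN teardown above captures the previous value before overriding it and now restores it inside t.after() instead of a finally block. A small sketch of the same save-and-restore move, using a placeholder EXAMPLE_TOKEN name rather than any variable the suite actually reads:

import { test } from "node:test";
import assert from "node:assert/strict";

test("restores an environment variable it overrides", (t) => {
  const previous = process.env.EXAMPLE_TOKEN;
  t.after(() => {
    // Undo the override exactly as it was found.
    if (previous === undefined) {
      delete process.env.EXAMPLE_TOKEN;
    } else {
      process.env.EXAMPLE_TOKEN = previous;
    }
  });

  process.env.EXAMPLE_TOKEN = "test-value";
  assert.equal(process.env.EXAMPLE_TOKEN, "test-value");
});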
|
||||
|
||||
test("failed API-key validation stays locked, redacts the error, and is reflected in boot state without persisting auth", async () => {
|
||||
test("failed API-key validation stays locked, redacts the error, and is reflected in boot state without persisting auth", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
|
|
@ -412,89 +412,89 @@ test("failed API-key validation stays locked, redacts the error, and is reflecte
|
|||
}),
|
||||
});
|
||||
|
||||
try {
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-test-secret-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 422);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, false);
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.providerId, "openai");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.persisted, false);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.match(validationPayload.onboarding.lastValidation.message, /OpenAI rejected/i);
|
||||
assert.doesNotMatch(validationPayload.onboarding.lastValidation.message, /sk-test-secret-123456/);
|
||||
assert.equal(authStorage.hasAuth("openai"), false);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lastValidation.status, "failed");
|
||||
assert.doesNotMatch(bootPayload.onboarding.lastValidation.message, /sk-test-secret-123456/);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-test-secret-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 422);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, false);
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.providerId, "openai");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.persisted, false);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "idle");
|
||||
assert.match(validationPayload.onboarding.lastValidation.message, /OpenAI rejected/i);
|
||||
assert.doesNotMatch(validationPayload.onboarding.lastValidation.message, /sk-test-secret-123456/);
|
||||
assert.equal(authStorage.hasAuth("openai"), false);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
assert.equal(bootResponse.status, 200);
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lastValidation.status, "failed");
|
||||
assert.doesNotMatch(bootPayload.onboarding.lastValidation.message, /sk-test-secret-123456/);
|
||||
});
|
||||
|
||||
test("direct prompt commands cannot bypass onboarding while required setup is still locked", async () => {
|
||||
test("direct prompt commands cannot bypass onboarding while required setup is still locked", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
const harness = configureBridgeFixture(fixture, "sess-command-locked");
|
||||
onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey });
|
||||
|
||||
try {
|
||||
const response = await commandRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "hello from bypass attempt" }),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(response.status, 423);
|
||||
const payload = (await response.json()) as any;
|
||||
assert.equal(payload.success, false);
|
||||
assert.equal(payload.command, "prompt");
|
||||
assert.equal(payload.code, "onboarding_locked");
|
||||
assert.equal(payload.details.reason, "required_setup");
|
||||
assert.equal(payload.details.onboarding.locked, true);
|
||||
assert.equal(harness.spawnCalls, 0);
|
||||
|
||||
const stateResponse = await commandRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_state" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(stateResponse.status, 200);
|
||||
const statePayload = (await stateResponse.json()) as any;
|
||||
assert.equal(statePayload.success, true);
|
||||
assert.equal(statePayload.command, "get_state");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const response = await commandRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "prompt", message: "hello from bypass attempt" }),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(response.status, 423);
|
||||
const payload = (await response.json()) as any;
|
||||
assert.equal(payload.success, false);
|
||||
assert.equal(payload.command, "prompt");
|
||||
assert.equal(payload.code, "onboarding_locked");
|
||||
assert.equal(payload.details.reason, "required_setup");
|
||||
assert.equal(payload.details.onboarding.locked, true);
|
||||
assert.equal(harness.spawnCalls, 0);
|
||||
|
||||
const stateResponse = await commandRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/session/command", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ type: "get_state" }),
|
||||
}),
|
||||
);
|
||||
assert.equal(stateResponse.status, 200);
|
||||
const statePayload = (await stateResponse.json()) as any;
|
||||
assert.equal(statePayload.success, true);
|
||||
assert.equal(statePayload.command, "get_state");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
});
|
||||
|
||||
test("bridge auth refresh failures remain inspectable and keep the workspace locked after credentials validate", async () => {
|
||||
test("bridge auth refresh failures remain inspectable and keep the workspace locked after credentials validate", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
|
|
@ -508,43 +508,43 @@ test("bridge auth refresh failures remain inspectable and keep the workspace loc
|
|||
},
|
||||
});
|
||||
|
||||
try {
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 503);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, true);
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(validationPayload.onboarding.bridgeAuthRefresh.error, /bridge restart failed/i);
|
||||
assert.doesNotMatch(validationPayload.onboarding.bridgeAuthRefresh.error, /sk-refresh-secret-123456/);
|
||||
assert.equal(authStorage.hasAuth("openai"), true);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 503);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.required.satisfied, true);
|
||||
assert.equal(validationPayload.onboarding.locked, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
assert.match(validationPayload.onboarding.bridgeAuthRefresh.error, /bridge restart failed/i);
|
||||
assert.doesNotMatch(validationPayload.onboarding.bridgeAuthRefresh.error, /sk-refresh-secret-123456/);
|
||||
assert.equal(authStorage.hasAuth("openai"), true);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, true);
|
||||
assert.equal(bootPayload.onboarding.lockReason, "bridge_refresh_failed");
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "failed");
|
||||
});
|
||||
|
||||
test("successful API-key validation persists the credential and unlocks onboarding", async () => {
|
||||
test("successful API-key validation persists the credential and unlocks onboarding", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
|
|
@ -555,47 +555,47 @@ test("successful API-key validation persists the credential and unlocks onboardi
|
|||
validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }),
|
||||
});
|
||||
|
||||
try {
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 200);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, false);
|
||||
assert.deepEqual(validationPayload.onboarding.required.satisfiedBy, {
|
||||
providerId: "openai",
|
||||
source: "auth_file",
|
||||
});
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.persisted, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, null);
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(authStorage.hasAuth("openai"), true);
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, false);
|
||||
assert.equal(bootPayload.onboarding.lockReason, null);
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(bootPayload.onboardingNeeded, false);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const validationResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "save_api_key",
|
||||
providerId: "openai",
|
||||
apiKey: "sk-valid-123456",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(validationResponse.status, 200);
|
||||
const validationPayload = (await validationResponse.json()) as any;
|
||||
assert.equal(validationPayload.onboarding.locked, false);
|
||||
assert.deepEqual(validationPayload.onboarding.required.satisfiedBy, {
|
||||
providerId: "openai",
|
||||
source: "auth_file",
|
||||
});
|
||||
assert.equal(validationPayload.onboarding.lastValidation.status, "succeeded");
|
||||
assert.equal(validationPayload.onboarding.lastValidation.persisted, true);
|
||||
assert.equal(validationPayload.onboarding.lockReason, null);
|
||||
assert.equal(validationPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(authStorage.hasAuth("openai"), true);
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootPayload = (await bootResponse.json()) as any;
|
||||
assert.equal(bootPayload.onboarding.locked, false);
|
||||
assert.equal(bootPayload.onboarding.lockReason, null);
|
||||
assert.equal(bootPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(bootPayload.onboardingNeeded, false);
|
||||
});
|
||||
|
||||
test("logout_provider removes saved auth, refreshes the bridge, and relocks onboarding when it was the only provider", async () => {
|
||||
test("logout_provider removes saved auth, refreshes the bridge, and relocks onboarding when it was the only provider", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({
|
||||
|
|
@ -604,47 +604,47 @@ test("logout_provider removes saved auth, refreshes the bridge, and relocks onbo
|
|||
const harness = configureBridgeFixture(fixture, "sess-logout-success");
|
||||
onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey });
|
||||
|
||||
try {
|
||||
const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootBeforePayload = (await bootBefore.json()) as any;
|
||||
assert.equal(bootBeforePayload.onboarding.locked, false);
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "openai");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const logoutResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "logout_provider",
|
||||
providerId: "openai",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(logoutResponse.status, 200);
|
||||
const logoutPayload = (await logoutResponse.json()) as any;
|
||||
assert.equal(logoutPayload.onboarding.locked, true);
|
||||
assert.equal(logoutPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(logoutPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(logoutPayload.onboarding.lastValidation, null);
|
||||
assert.equal(authStorage.hasAuth("openai"), false);
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
|
||||
const bootAfter = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootAfterPayload = (await bootAfter.json()) as any;
|
||||
assert.equal(bootAfterPayload.onboarding.locked, true);
|
||||
assert.equal(bootAfterPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(bootAfterPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(bootAfterPayload.onboarding.required.satisfied, false);
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootBeforePayload = (await bootBefore.json()) as any;
|
||||
assert.equal(bootBeforePayload.onboarding.locked, false);
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "openai");
|
||||
assert.equal(harness.spawnCalls, 1);
|
||||
|
||||
const logoutResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "logout_provider",
|
||||
providerId: "openai",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(logoutResponse.status, 200);
|
||||
const logoutPayload = (await logoutResponse.json()) as any;
|
||||
assert.equal(logoutPayload.onboarding.locked, true);
|
||||
assert.equal(logoutPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(logoutPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(logoutPayload.onboarding.lastValidation, null);
|
||||
assert.equal(authStorage.hasAuth("openai"), false);
|
||||
assert.equal(harness.spawnCalls, 2);
|
||||
|
||||
const bootAfter = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootAfterPayload = (await bootAfter.json()) as any;
|
||||
assert.equal(bootAfterPayload.onboarding.locked, true);
|
||||
assert.equal(bootAfterPayload.onboarding.lockReason, "required_setup");
|
||||
assert.equal(bootAfterPayload.onboarding.bridgeAuthRefresh.phase, "succeeded");
|
||||
assert.equal(bootAfterPayload.onboarding.required.satisfied, false);
|
||||
});
|
||||
|
||||
test("logout_provider fails clearly for environment-backed auth that the browser cannot remove", async () => {
|
||||
test("logout_provider fails clearly for environment-backed auth that the browser cannot remove", async (t) => {
|
||||
const fixture = makeWorkspaceFixture();
|
||||
clearOnboardingEnv();
|
||||
const authStorage = AuthStorage.inMemory({});
|
||||
|
|
@ -656,38 +656,38 @@ test("logout_provider fails clearly for environment-backed auth that the browser
|
|||
getEnvApiKey: (provider: string) => (provider === "github-copilot" ? process.env.GITHUB_TOKEN : undefined),
|
||||
});
|
||||
|
||||
try {
|
||||
const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootBeforePayload = (await bootBefore.json()) as any;
|
||||
assert.equal(bootBeforePayload.onboarding.locked, false);
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "github-copilot");
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.source, "environment");
|
||||
|
||||
const logoutResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "logout_provider",
|
||||
providerId: "github-copilot",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(logoutResponse.status, 400);
|
||||
const logoutPayload = (await logoutResponse.json()) as any;
|
||||
assert.match(logoutPayload.error, /cannot be logged out from the browser surface/i);
|
||||
assert.equal(logoutPayload.onboarding.locked, false);
|
||||
assert.equal(logoutPayload.onboarding.required.satisfiedBy.providerId, "github-copilot");
|
||||
assert.equal(logoutPayload.onboarding.required.satisfiedBy.source, "environment");
|
||||
} finally {
|
||||
t.after(async () => {
|
||||
if (previousGithubToken === undefined) {
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
} else {
|
||||
process.env.GITHUB_TOKEN = previousGithubToken;
|
||||
process.env.GITHUB_TOKEN = previousGithubToken;
|
||||
}
|
||||
onboarding.resetOnboardingServiceForTests();
|
||||
await bridge.resetBridgeServiceForTests();
|
||||
restoreOnboardingEnv();
|
||||
fixture.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot"));
|
||||
const bootBeforePayload = (await bootBefore.json()) as any;
|
||||
assert.equal(bootBeforePayload.onboarding.locked, false);
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "github-copilot");
|
||||
assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.source, "environment");
|
||||
|
||||
const logoutResponse = await onboardingRoute.POST(
|
||||
projectRequest(fixture.projectCwd, "/api/onboarding", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "logout_provider",
|
||||
providerId: "github-copilot",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(logoutResponse.status, 400);
|
||||
const logoutPayload = (await logoutResponse.json()) as any;
|
||||
assert.match(logoutPayload.error, /cannot be logged out from the browser surface/i);
|
||||
assert.equal(logoutPayload.onboarding.locked, false);
|
||||
assert.equal(logoutPayload.onboarding.required.satisfiedBy.providerId, "github-copilot");
|
||||
assert.equal(logoutPayload.onboarding.required.satisfiedBy.source, "environment");
|
||||
});
@ -209,7 +209,7 @@ function fakeSessionState(sessionId: string, sessionPath?: string) {
|
|||
}
|
||||
}
|
||||
|
||||
test("/api/recovery returns structured recovery diagnostics and redacts secrets", async () => {
|
||||
test("/api/recovery returns structured recovery diagnostics and redacts secrets", async (t) => {
|
||||
const fixture = makeRecoveryFixture()
|
||||
const sessionPath = createRecoverySessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-recovery")
|
||||
const harness = createHarness((command, current) => {
|
||||
|
|
@ -247,39 +247,39 @@ test("/api/recovery returns structured recovery diagnostics and redacts secrets"
|
|||
}),
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.status, "ready")
|
||||
assert.equal(payload.project.activeSessionPath, sessionPath)
|
||||
assert.equal(payload.project.activeSessionId, "sess-recovery")
|
||||
assert.equal(payload.bridge.retry.inProgress, true)
|
||||
assert.equal(payload.bridge.retry.attempt, 2)
|
||||
assert.equal(payload.bridge.authRefresh.phase, "failed")
|
||||
assert.match(payload.bridge.authRefresh.label, /failed/i)
|
||||
assert.ok(typeof payload.doctor.total === "number")
|
||||
assert.ok(Array.isArray(payload.doctor.codes))
|
||||
assert.ok(typeof payload.validation.total === "number")
|
||||
assert.equal(payload.interruptedRun.detected, true)
|
||||
assert.match(payload.interruptedRun.lastError ?? "", /\[redacted\]/)
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace", "open_retry_controls", "open_resume_controls", "open_auth_controls"],
|
||||
)
|
||||
assert.ok(payload.actions.commands.some((entry: { command: string }) => entry.command.includes("/gsd doctor")))
|
||||
|
||||
const serialized = JSON.stringify(payload)
|
||||
assert.doesNotMatch(serialized, /sk-test-recovery-secret-9999|sk-onboarding-secret-1234/)
|
||||
assert.doesNotMatch(serialized, /Crash Recovery Briefing|Completed Tool Calls|toolCallId/)
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
fixture.cleanup()
}
});

const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.status, "ready")
|
||||
assert.equal(payload.project.activeSessionPath, sessionPath)
|
||||
assert.equal(payload.project.activeSessionId, "sess-recovery")
|
||||
assert.equal(payload.bridge.retry.inProgress, true)
|
||||
assert.equal(payload.bridge.retry.attempt, 2)
|
||||
assert.equal(payload.bridge.authRefresh.phase, "failed")
|
||||
assert.match(payload.bridge.authRefresh.label, /failed/i)
|
||||
assert.ok(typeof payload.doctor.total === "number")
|
||||
assert.ok(Array.isArray(payload.doctor.codes))
|
||||
assert.ok(typeof payload.validation.total === "number")
|
||||
assert.equal(payload.interruptedRun.detected, true)
|
||||
assert.match(payload.interruptedRun.lastError ?? "", /\[redacted\]/)
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace", "open_retry_controls", "open_resume_controls", "open_auth_controls"],
|
||||
)
|
||||
assert.ok(payload.actions.commands.some((entry: { command: string }) => entry.command.includes("/gsd doctor")))
|
||||
|
||||
const serialized = JSON.stringify(payload)
|
||||
assert.doesNotMatch(serialized, /sk-test-recovery-secret-9999|sk-onboarding-secret-1234/)
|
||||
assert.doesNotMatch(serialized, /Crash Recovery Briefing|Completed Tool Calls|toolCallId/)
|
||||
})
|
||||
|
||||
test("/api/recovery prefers the current-project resumable session when the live bridge session is out of scope", async () => {
|
||||
test("/api/recovery prefers the current-project resumable session when the live bridge session is out of scope", async (t) => {
|
||||
const fixture = makeRecoveryFixture()
|
||||
const sessionPath = createRecoverySessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-recovery")
|
||||
const externalSessionPath = join(fixture.projectCwd, "..", "agent-sessions", "2026-03-15T03-40-00-000Z_sess-external.jsonl")
|
||||
|
|
@@ -308,26 +308,26 @@ test("/api/recovery prefers the current-project resumable session when the live
|
|||
getOnboardingState: async () => readyOnboardingState(),
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.project.activeSessionPath, sessionPath)
|
||||
assert.equal(payload.project.activeSessionId, "sess-recovery")
|
||||
assert.equal(payload.interruptedRun.detected, true)
|
||||
assert.match(payload.interruptedRun.lastError ?? "", /\[redacted\]/)
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace", "open_retry_controls", "open_resume_controls"],
|
||||
)
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
fixture.cleanup()
}
});

const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.project.activeSessionPath, sessionPath)
|
||||
assert.equal(payload.project.activeSessionId, "sess-recovery")
|
||||
assert.equal(payload.interruptedRun.detected, true)
|
||||
assert.match(payload.interruptedRun.lastError ?? "", /\[redacted\]/)
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace", "open_retry_controls", "open_resume_controls"],
|
||||
)
|
||||
})
|
||||
|
||||
test("/api/recovery returns a structured empty-project payload without leaking raw diagnostics", async () => {
|
||||
test("/api/recovery returns a structured empty-project payload without leaking raw diagnostics", async (t) => {
|
||||
const fixture = makeEmptyProjectFixture()
|
||||
const harness = createHarness((command, current) => {
|
||||
if (command.type === "get_state") {
|
||||
|
|
@@ -359,22 +359,22 @@ test("/api/recovery returns a structured empty-project payload without leaking r
|
|||
getOnboardingState: async () => readyOnboardingState(),
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.ok(["ready", "unavailable"].includes(payload.status))
|
||||
assert.equal(payload.project.activeScope, null)
|
||||
assert.equal(payload.validation.total, 0)
|
||||
assert.ok(typeof payload.doctor.total === "number")
|
||||
assert.ok(typeof payload.interruptedRun.available === "boolean")
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace"],
|
||||
)
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
fixture.cleanup()
}
});

const response = await recoveryRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.ok(["ready", "unavailable"].includes(payload.status))
|
||||
assert.equal(payload.project.activeScope, null)
|
||||
assert.equal(payload.validation.total, 0)
|
||||
assert.ok(typeof payload.doctor.total === "number")
|
||||
assert.ok(typeof payload.interruptedRun.available === "boolean")
|
||||
assert.deepEqual(
|
||||
payload.actions.browser.map((action: { id: string }) => action.id),
|
||||
["refresh_diagnostics", "refresh_workspace"],
|
||||
)
|
||||
})
|
||||
|
|
|
|||
|
|
@@ -234,7 +234,7 @@ function configureBridgeFixture(
|
|||
})
|
||||
}
|
||||
|
||||
test("/api/session/browser stays current-project scoped and carries threaded/search metadata outside /api/boot", async () => {
|
||||
test("/api/session/browser stays current-project scoped and carries threaded/search metadata outside /api/boot", async (t) => {
|
||||
const fixture = makeWorkspaceFixture()
|
||||
const rootPath = createSessionFile({
|
||||
projectCwd: fixture.projectCwd,
|
||||
|
|
@@ -313,48 +313,48 @@ test("/api/session/browser stays current-project scoped and carries threaded/sea
|
|||
|
||||
configureBridgeFixture(fixture, harness)
|
||||
|
||||
try {
|
||||
const response = await browserRoute.GET(new Request("http://localhost/api/session/browser"))
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, fixture.projectCwd)
|
||||
assert.equal(payload.project.sessionsDir, fixture.sessionsDir)
|
||||
assert.equal(payload.project.activeSessionPath, childPath)
|
||||
assert.equal(payload.totalSessions, 3)
|
||||
assert.equal(payload.returnedSessions, 3)
|
||||
assert.equal(payload.sessions.some((session: any) => session.path === outsidePath), false)
|
||||
|
||||
const child = payload.sessions.find((session: any) => session.id === "sess-child")
|
||||
assert.ok(child)
|
||||
assert.equal(child.parentSessionPath, rootPath)
|
||||
assert.equal(child.firstMessage, "Investigate the branch rename")
|
||||
assert.equal(child.isActive, true)
|
||||
assert.equal(child.depth, 1)
|
||||
assert.deepEqual(child.ancestorHasNextSibling, [false])
|
||||
assert.equal("allMessagesText" in child, false)
|
||||
|
||||
const searchResponse = await browserRoute.GET(
|
||||
new Request("http://localhost/api/session/browser?query=api-session-browser&sortMode=relevance&nameFilter=named"),
|
||||
)
|
||||
assert.equal(searchResponse.status, 200)
|
||||
const searchPayload = await searchResponse.json() as any
|
||||
|
||||
assert.equal(searchPayload.totalSessions, 3)
|
||||
assert.equal(searchPayload.returnedSessions, 1)
|
||||
assert.equal(searchPayload.query.sortMode, "relevance")
|
||||
assert.equal(searchPayload.query.nameFilter, "named")
|
||||
assert.equal(searchPayload.sessions[0].id, "sess-named")
|
||||
assert.equal(searchPayload.sessions[0].name, "Release Notes")
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
onboarding.resetOnboardingServiceForTests()
fixture.cleanup()
}
});

const response = await browserRoute.GET(new Request("http://localhost/api/session/browser"))
|
||||
assert.equal(response.status, 200)
|
||||
const payload = await response.json() as any
|
||||
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, fixture.projectCwd)
|
||||
assert.equal(payload.project.sessionsDir, fixture.sessionsDir)
|
||||
assert.equal(payload.project.activeSessionPath, childPath)
|
||||
assert.equal(payload.totalSessions, 3)
|
||||
assert.equal(payload.returnedSessions, 3)
|
||||
assert.equal(payload.sessions.some((session: any) => session.path === outsidePath), false)
|
||||
|
||||
const child = payload.sessions.find((session: any) => session.id === "sess-child")
|
||||
assert.ok(child)
|
||||
assert.equal(child.parentSessionPath, rootPath)
|
||||
assert.equal(child.firstMessage, "Investigate the branch rename")
|
||||
assert.equal(child.isActive, true)
|
||||
assert.equal(child.depth, 1)
|
||||
assert.deepEqual(child.ancestorHasNextSibling, [false])
|
||||
assert.equal("allMessagesText" in child, false)
|
||||
|
||||
const searchResponse = await browserRoute.GET(
|
||||
new Request("http://localhost/api/session/browser?query=api-session-browser&sortMode=relevance&nameFilter=named"),
|
||||
)
|
||||
assert.equal(searchResponse.status, 200)
|
||||
const searchPayload = await searchResponse.json() as any
|
||||
|
||||
assert.equal(searchPayload.totalSessions, 3)
|
||||
assert.equal(searchPayload.returnedSessions, 1)
|
||||
assert.equal(searchPayload.query.sortMode, "relevance")
|
||||
assert.equal(searchPayload.query.nameFilter, "named")
|
||||
assert.equal(searchPayload.sessions[0].id, "sess-named")
|
||||
assert.equal(searchPayload.sessions[0].name, "Release Notes")
|
||||
})
|
||||
|
||||
test("/api/session/manage renames the active session through bridge-aware RPC instead of mutating the file directly", async () => {
|
||||
test("/api/session/manage renames the active session through bridge-aware RPC instead of mutating the file directly", async (t) => {
|
||||
const fixture = makeWorkspaceFixture()
|
||||
const activePath = createSessionFile({
|
||||
projectCwd: fixture.projectCwd,
|
||||
|
|
@@ -415,35 +415,35 @@ test("/api/session/manage renames the active session through bridge-aware RPC in
|
|||
} as any),
|
||||
})
|
||||
|
||||
try {
|
||||
const response = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: activePath,
|
||||
name: "Active Renamed",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const payload = await response.json() as any
|
||||
await waitForMicrotasks()
|
||||
|
||||
assert.equal(response.status, 200)
|
||||
assert.equal(payload.success, true)
|
||||
assert.equal(payload.sessionPath, activePath)
|
||||
assert.equal(payload.isActiveSession, true)
|
||||
assert.equal(payload.mutation, "rpc")
|
||||
assert.ok(harness.commands.some((command) => command.type === "set_session_name" && command.name === "Active Renamed"))
|
||||
assert.equal(getLatestSessionName(activePath), "Before Active Rename")
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
onboarding.resetOnboardingServiceForTests()
fixture.cleanup()
}
});

const response = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: activePath,
|
||||
name: "Active Renamed",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const payload = await response.json() as any
|
||||
await waitForMicrotasks()
|
||||
|
||||
assert.equal(response.status, 200)
|
||||
assert.equal(payload.success, true)
|
||||
assert.equal(payload.sessionPath, activePath)
|
||||
assert.equal(payload.isActiveSession, true)
|
||||
assert.equal(payload.mutation, "rpc")
|
||||
assert.ok(harness.commands.some((command) => command.type === "set_session_name" && command.name === "Active Renamed"))
|
||||
assert.equal(getLatestSessionName(activePath), "Before Active Rename")
|
||||
})
|
||||
|
||||
test("/api/session/manage renames inactive sessions via authoritative session-file mutation and rejects out-of-scope paths", async () => {
|
||||
test("/api/session/manage renames inactive sessions via authoritative session-file mutation and rejects out-of-scope paths", async (t) => {
|
||||
const fixture = makeWorkspaceFixture()
|
||||
const activePath = createSessionFile({
|
||||
projectCwd: fixture.projectCwd,
|
||||
|
|
@@ -520,122 +520,118 @@ test("/api/session/manage renames inactive sessions via authoritative session-fi
|
|||
} as any),
|
||||
})
|
||||
|
||||
try {
|
||||
const renameResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: inactivePath,
|
||||
name: "Inactive Renamed",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const renamePayload = await renameResponse.json() as any
|
||||
|
||||
assert.equal(renameResponse.status, 200)
|
||||
assert.equal(renamePayload.success, true)
|
||||
assert.equal(renamePayload.isActiveSession, false)
|
||||
assert.equal(renamePayload.mutation, "session_file")
|
||||
assert.equal(getLatestSessionName(inactivePath), "Inactive Renamed")
|
||||
assert.equal(harness.commands.some((command) => command.type === "set_session_name"), false)
|
||||
|
||||
const outsideResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: outsidePath,
|
||||
name: "Should Fail",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const outsidePayload = await outsideResponse.json() as any
|
||||
|
||||
assert.equal(outsideResponse.status, 404)
|
||||
assert.equal(outsidePayload.success, false)
|
||||
assert.equal(outsidePayload.code, "not_found")
|
||||
assert.equal(getLatestSessionName(outsidePath), "Outside Session")
} finally {
t.after(async () => {
await bridge.resetBridgeServiceForTests()
onboarding.resetOnboardingServiceForTests()
fixture.cleanup()
}
});

const renameResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: inactivePath,
|
||||
name: "Inactive Renamed",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const renamePayload = await renameResponse.json() as any
|
||||
|
||||
assert.equal(renameResponse.status, 200)
|
||||
assert.equal(renamePayload.success, true)
|
||||
assert.equal(renamePayload.isActiveSession, false)
|
||||
assert.equal(renamePayload.mutation, "session_file")
|
||||
assert.equal(getLatestSessionName(inactivePath), "Inactive Renamed")
|
||||
assert.equal(harness.commands.some((command) => command.type === "set_session_name"), false)
|
||||
|
||||
const outsideResponse = await manageRoute.POST(
|
||||
new Request("http://localhost/api/session/manage", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
action: "rename",
|
||||
sessionPath: outsidePath,
|
||||
name: "Should Fail",
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const outsidePayload = await outsideResponse.json() as any
|
||||
|
||||
assert.equal(outsideResponse.status, 404)
|
||||
assert.equal(outsidePayload.success, false)
|
||||
assert.equal(outsidePayload.code, "not_found")
|
||||
assert.equal(getLatestSessionName(outsidePath), "Outside Session")
|
||||
})
|
||||
|
||||
test("/api/git returns a current-project-scoped repo summary and ignores changes outside the current project subtree", async () => {
|
||||
test("/api/git returns a current-project-scoped repo summary and ignores changes outside the current project subtree", async (t) => {
|
||||
const root = mkdtempSync(join(tmpdir(), "gsd-web-git-summary-"))
|
||||
const repoRoot = join(root, "repo")
|
||||
const projectCwd = join(repoRoot, "apps", "current-project")
|
||||
const docsDir = join(repoRoot, "docs")
|
||||
|
||||
try {
|
||||
mkdirSync(projectCwd, { recursive: true })
|
||||
mkdirSync(docsDir, { recursive: true })
|
||||
t.after(() => { rmSync(root, { recursive: true, force: true }) });
|
||||
|
||||
writeFileSync(join(projectCwd, "staged.txt"), "baseline staged\n")
|
||||
writeFileSync(join(projectCwd, "dirty.txt"), "baseline dirty\n")
|
||||
writeFileSync(join(docsDir, "outside.txt"), "baseline outside\n")
|
||||
mkdirSync(projectCwd, { recursive: true })
|
||||
mkdirSync(docsDir, { recursive: true })
|
||||
|
||||
git(repoRoot, ["init"])
|
||||
git(repoRoot, ["config", "user.name", "GSD Test"])
|
||||
git(repoRoot, ["config", "user.email", "gsd-test@example.com"])
|
||||
git(repoRoot, ["add", "."])
|
||||
git(repoRoot, ["commit", "-m", "initial"])
|
||||
writeFileSync(join(projectCwd, "staged.txt"), "baseline staged\n")
|
||||
writeFileSync(join(projectCwd, "dirty.txt"), "baseline dirty\n")
|
||||
writeFileSync(join(docsDir, "outside.txt"), "baseline outside\n")
|
||||
|
||||
writeFileSync(join(projectCwd, "staged.txt"), "baseline staged\nnext staged line\n")
|
||||
git(repoRoot, ["add", "apps/current-project/staged.txt"])
|
||||
writeFileSync(join(projectCwd, "dirty.txt"), "baseline dirty\nnext dirty line\n")
|
||||
writeFileSync(join(projectCwd, "untracked.txt"), "brand new\n")
|
||||
writeFileSync(join(docsDir, "outside.txt"), "baseline outside\noutside change\n")
|
||||
git(repoRoot, ["init"])
|
||||
git(repoRoot, ["config", "user.name", "GSD Test"])
|
||||
git(repoRoot, ["config", "user.email", "gsd-test@example.com"])
|
||||
git(repoRoot, ["add", "."])
|
||||
git(repoRoot, ["commit", "-m", "initial"])
|
||||
|
||||
const authoritativeRepoRoot = resolve(git(projectCwd, ["rev-parse", "--show-toplevel"]))
|
||||
writeFileSync(join(projectCwd, "staged.txt"), "baseline staged\nnext staged line\n")
|
||||
git(repoRoot, ["add", "apps/current-project/staged.txt"])
|
||||
writeFileSync(join(projectCwd, "dirty.txt"), "baseline dirty\nnext dirty line\n")
|
||||
writeFileSync(join(projectCwd, "untracked.txt"), "brand new\n")
|
||||
writeFileSync(join(docsDir, "outside.txt"), "baseline outside\noutside change\n")
|
||||
|
||||
await withProjectGitEnv(projectCwd, async () => {
|
||||
const response = await gitRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
const authoritativeRepoRoot = resolve(git(projectCwd, ["rev-parse", "--show-toplevel"]))
|
||||
|
||||
const payload = await response.json() as any
|
||||
assert.equal(payload.kind, "repo")
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, projectCwd)
|
||||
assert.equal(payload.project.repoRoot, authoritativeRepoRoot)
|
||||
assert.equal(payload.project.repoRelativePath, "apps/current-project")
|
||||
assert.equal(payload.hasChanges, true)
|
||||
assert.equal(payload.counts.changed, 3)
|
||||
assert.equal(payload.counts.staged, 1)
|
||||
assert.equal(payload.counts.dirty, 1)
|
||||
assert.equal(payload.counts.untracked, 1)
|
||||
assert.equal(payload.counts.conflicts, 0)
|
||||
assert.equal(payload.changedFiles.some((file: any) => file.repoPath === "docs/outside.txt"), false)
|
||||
assert.deepEqual(
|
||||
payload.changedFiles.map((file: any) => file.path).sort(),
|
||||
["dirty.txt", "staged.txt", "untracked.txt"],
|
||||
)
|
||||
})
|
||||
} finally {
|
||||
rmSync(root, { recursive: true, force: true })
|
||||
}
|
||||
await withProjectGitEnv(projectCwd, async () => {
|
||||
const response = await gitRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
|
||||
const payload = await response.json() as any
|
||||
assert.equal(payload.kind, "repo")
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, projectCwd)
|
||||
assert.equal(payload.project.repoRoot, authoritativeRepoRoot)
|
||||
assert.equal(payload.project.repoRelativePath, "apps/current-project")
|
||||
assert.equal(payload.hasChanges, true)
|
||||
assert.equal(payload.counts.changed, 3)
|
||||
assert.equal(payload.counts.staged, 1)
|
||||
assert.equal(payload.counts.dirty, 1)
|
||||
assert.equal(payload.counts.untracked, 1)
|
||||
assert.equal(payload.counts.conflicts, 0)
|
||||
assert.equal(payload.changedFiles.some((file: any) => file.repoPath === "docs/outside.txt"), false)
|
||||
assert.deepEqual(
|
||||
payload.changedFiles.map((file: any) => file.path).sort(),
|
||||
["dirty.txt", "staged.txt", "untracked.txt"],
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
test("/api/git exposes an explicit not-a-repo state instead of failing silently", async () => {
|
||||
test("/api/git exposes an explicit not-a-repo state instead of failing silently", async (t) => {
|
||||
const projectCwd = mkdtempSync(join(tmpdir(), "gsd-web-not-repo-"))
|
||||
|
||||
try {
|
||||
await withProjectGitEnv(projectCwd, async () => {
|
||||
const response = await gitRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
t.after(() => { rmSync(projectCwd, { recursive: true, force: true }) });
|
||||
|
||||
const payload = await response.json() as any
|
||||
assert.equal(payload.kind, "not_repo")
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, projectCwd)
|
||||
assert.equal(payload.project.repoRoot, null)
|
||||
assert.match(payload.message, /not inside a Git repository/i)
|
||||
})
|
||||
} finally {
|
||||
rmSync(projectCwd, { recursive: true, force: true })
|
||||
}
|
||||
await withProjectGitEnv(projectCwd, async () => {
|
||||
const response = await gitRoute.GET()
|
||||
assert.equal(response.status, 200)
|
||||
|
||||
const payload = await response.json() as any
|
||||
assert.equal(payload.kind, "not_repo")
|
||||
assert.equal(payload.project.scope, "current_project")
|
||||
assert.equal(payload.project.cwd, projectCwd)
|
||||
assert.equal(payload.project.repoRoot, null)
|
||||
assert.match(payload.message, /not inside a Git repository/i)
|
||||
})
|
||||
})
|
||||
|
||||
test("browser session, settings, and git surfaces keep inspectable browse/manage/state markers on the shared surface", () => {
|
||||
|
|
|
|||
|
|
@@ -26,90 +26,86 @@ function makeGsdFixture(): { root: string; gsdDir: string; cleanup: () => void }
|
|||
}
|
||||
|
||||
// ─── Group 1: Workspace index — risk/depends/demo fields ─────────────
|
||||
test("indexWorkspace extracts risk, depends, and demo from roadmap", async () => {
|
||||
test("indexWorkspace extracts risk, depends, and demo from roadmap", async (t) => {
|
||||
const { root, gsdDir, cleanup } = makeGsdFixture();
|
||||
|
||||
try {
|
||||
const milestoneDir = join(gsdDir, "milestones", "M001");
|
||||
const sliceDir = join(milestoneDir, "slices", "S01");
|
||||
const tasksDir = join(sliceDir, "tasks");
|
||||
mkdirSync(tasksDir, { recursive: true });
|
||||
t.after(() => { cleanup(); });
|
||||
|
||||
writeFileSync(
|
||||
join(milestoneDir, "M001-ROADMAP.md"),
|
||||
[
|
||||
"# M001: Test Milestone",
|
||||
"",
|
||||
"## Slices",
|
||||
"- [ ] **S01: Feature slice** `risk:high` `depends:[S00]`",
|
||||
" > After this: users can see the dashboard",
|
||||
].join("\n"),
|
||||
);
|
||||
const milestoneDir = join(gsdDir, "milestones", "M001");
|
||||
const sliceDir = join(milestoneDir, "slices", "S01");
|
||||
const tasksDir = join(sliceDir, "tasks");
|
||||
mkdirSync(tasksDir, { recursive: true });
|
||||
|
||||
writeFileSync(
|
||||
join(sliceDir, "S01-PLAN.md"),
|
||||
[
|
||||
"# S01: Feature slice",
|
||||
"",
|
||||
"**Goal:** Build the feature",
|
||||
"**Demo:** Dashboard renders",
|
||||
"",
|
||||
"## Tasks",
|
||||
"- [ ] **T01: Build thing** `est:30m`",
|
||||
" Do the work.",
|
||||
].join("\n"),
|
||||
);
|
||||
writeFileSync(
|
||||
join(milestoneDir, "M001-ROADMAP.md"),
|
||||
[
|
||||
"# M001: Test Milestone",
|
||||
"",
|
||||
"## Slices",
|
||||
"- [ ] **S01: Feature slice** `risk:high` `depends:[S00]`",
|
||||
" > After this: users can see the dashboard",
|
||||
].join("\n"),
|
||||
);
|
||||
|
||||
writeFileSync(join(tasksDir, "T01-PLAN.md"), "# T01: Build thing\n\n## Steps\n- do it\n");
|
||||
writeFileSync(
|
||||
join(sliceDir, "S01-PLAN.md"),
|
||||
[
|
||||
"# S01: Feature slice",
|
||||
"",
|
||||
"**Goal:** Build the feature",
|
||||
"**Demo:** Dashboard renders",
|
||||
"",
|
||||
"## Tasks",
|
||||
"- [ ] **T01: Build thing** `est:30m`",
|
||||
" Do the work.",
|
||||
].join("\n"),
|
||||
);
|
||||
|
||||
const index = await workspaceIndex.indexWorkspace(root);
|
||||
writeFileSync(join(tasksDir, "T01-PLAN.md"), "# T01: Build thing\n\n## Steps\n- do it\n");
|
||||
|
||||
assert.equal(index.milestones.length, 1);
|
||||
assert.equal(index.milestones[0].id, "M001");
|
||||
const index = await workspaceIndex.indexWorkspace(root);
|
||||
|
||||
const slice = index.milestones[0].slices[0];
|
||||
assert.equal(slice.id, "S01");
|
||||
assert.equal(slice.risk, "high");
|
||||
assert.deepEqual(slice.depends, ["S00"]);
|
||||
assert.equal(slice.demo, "users can see the dashboard");
|
||||
assert.equal(slice.done, false);
|
||||
assert.equal(slice.tasks.length, 1);
|
||||
assert.equal(slice.tasks[0].id, "T01");
|
||||
assert.equal(slice.tasks[0].done, false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
assert.equal(index.milestones.length, 1);
|
||||
assert.equal(index.milestones[0].id, "M001");
|
||||
|
||||
const slice = index.milestones[0].slices[0];
|
||||
assert.equal(slice.id, "S01");
|
||||
assert.equal(slice.risk, "high");
|
||||
assert.deepEqual(slice.depends, ["S00"]);
|
||||
assert.equal(slice.demo, "users can see the dashboard");
|
||||
assert.equal(slice.done, false);
|
||||
assert.equal(slice.tasks.length, 1);
|
||||
assert.equal(slice.tasks[0].id, "T01");
|
||||
assert.equal(slice.tasks[0].done, false);
|
||||
});
|
||||
|
||||
test("indexWorkspace handles slices without risk/depends/demo", async () => {
|
||||
test("indexWorkspace handles slices without risk/depends/demo", async (t) => {
|
||||
const { root, gsdDir, cleanup } = makeGsdFixture();
|
||||
|
||||
try {
|
||||
const milestoneDir = join(gsdDir, "milestones", "M001");
|
||||
const sliceDir = join(milestoneDir, "slices", "S01");
|
||||
mkdirSync(join(sliceDir, "tasks"), { recursive: true });
|
||||
t.after(() => { cleanup(); });
|
||||
|
||||
writeFileSync(
|
||||
join(milestoneDir, "M001-ROADMAP.md"),
|
||||
"# M001: Minimal\n\n## Slices\n- [x] **S01: Done slice**\n",
|
||||
);
|
||||
const milestoneDir = join(gsdDir, "milestones", "M001");
|
||||
const sliceDir = join(milestoneDir, "slices", "S01");
|
||||
mkdirSync(join(sliceDir, "tasks"), { recursive: true });
|
||||
|
||||
writeFileSync(
|
||||
join(sliceDir, "S01-PLAN.md"),
|
||||
"# S01: Done slice\n\n**Goal:** Done\n\n## Tasks\n",
|
||||
);
|
||||
writeFileSync(
|
||||
join(milestoneDir, "M001-ROADMAP.md"),
|
||||
"# M001: Minimal\n\n## Slices\n- [x] **S01: Done slice**\n",
|
||||
);
|
||||
|
||||
const index = await workspaceIndex.indexWorkspace(root);
|
||||
writeFileSync(
|
||||
join(sliceDir, "S01-PLAN.md"),
|
||||
"# S01: Done slice\n\n**Goal:** Done\n\n## Tasks\n",
|
||||
);
|
||||
|
||||
const slice = index.milestones[0].slices[0];
|
||||
// Parser defaults risk to "low" when not specified, demo to "" when no blockquote
|
||||
assert.equal(slice.risk, "low");
|
||||
assert.deepEqual(slice.depends, []);
|
||||
assert.equal(slice.demo, "");
|
||||
assert.equal(slice.done, true);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
const index = await workspaceIndex.indexWorkspace(root);
|
||||
|
||||
const slice = index.milestones[0].slices[0];
|
||||
// Parser defaults risk to "low" when not specified, demo to "" when no blockquote
|
||||
assert.equal(slice.risk, "low");
|
||||
assert.deepEqual(slice.depends, []);
|
||||
assert.equal(slice.demo, "");
|
||||
assert.equal(slice.done, true);
|
||||
});
|
||||
|
||||
// ─── Group 2: Shared status helpers ──────────────────────────────────
|
||||
|
|
@@ -195,174 +191,174 @@ test("getTaskStatus returns correct statuses", () => {
|
|||
});
|
||||
|
||||
// ─── Group 3: Files API — tree listing ───────────────────────────────
|
||||
test("files API returns tree listing of .gsd/ directory", async () => {
|
||||
test("files API returns tree listing of .gsd/ directory", async (t) => {
|
||||
const { root, gsdDir, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
// Create some files
|
||||
writeFileSync(join(gsdDir, "STATE.md"), "# State\nactive");
|
||||
writeFileSync(join(gsdDir, "PROJECT.md"), "# Project");
|
||||
const msDir = join(gsdDir, "milestones", "M001");
|
||||
mkdirSync(msDir, { recursive: true });
|
||||
writeFileSync(join(msDir, "M001-ROADMAP.md"), "# Roadmap");
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(Array.isArray(data.tree));
|
||||
assert.ok(data.tree.length > 0);
|
||||
|
||||
// Should have files at root level
|
||||
const names = data.tree.map((n: { name: string }) => n.name);
|
||||
assert.ok(names.includes("STATE.md"), `Expected STATE.md in tree, got: ${names}`);
|
||||
assert.ok(names.includes("PROJECT.md"), `Expected PROJECT.md in tree, got: ${names}`);
|
||||
assert.ok(names.includes("milestones"), `Expected milestones in tree, got: ${names}`);
|
||||
|
||||
// milestones should be a directory with children
|
||||
const milestones = data.tree.find((n: { name: string }) => n.name === "milestones");
|
||||
assert.equal(milestones.type, "directory");
|
||||
assert.ok(Array.isArray(milestones.children));
|
||||
assert.ok(milestones.children.length > 0);
|
} finally {
t.after(() => {
process.env.GSD_WEB_PROJECT_CWD = origEnv;
cleanup();
}
});

process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
// Create some files
|
||||
writeFileSync(join(gsdDir, "STATE.md"), "# State\nactive");
|
||||
writeFileSync(join(gsdDir, "PROJECT.md"), "# Project");
|
||||
const msDir = join(gsdDir, "milestones", "M001");
|
||||
mkdirSync(msDir, { recursive: true });
|
||||
writeFileSync(join(msDir, "M001-ROADMAP.md"), "# Roadmap");
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(Array.isArray(data.tree));
|
||||
assert.ok(data.tree.length > 0);
|
||||
|
||||
// Should have files at root level
|
||||
const names = data.tree.map((n: { name: string }) => n.name);
|
||||
assert.ok(names.includes("STATE.md"), `Expected STATE.md in tree, got: ${names}`);
|
||||
assert.ok(names.includes("PROJECT.md"), `Expected PROJECT.md in tree, got: ${names}`);
|
||||
assert.ok(names.includes("milestones"), `Expected milestones in tree, got: ${names}`);
|
||||
|
||||
// milestones should be a directory with children
|
||||
const milestones = data.tree.find((n: { name: string }) => n.name === "milestones");
|
||||
assert.equal(milestones.type, "directory");
|
||||
assert.ok(Array.isArray(milestones.children));
|
||||
assert.ok(milestones.children.length > 0);
|
||||
});
|
||||
|
||||
// ─── Group 4: Files API — file content ───────────────────────────────
|
||||
test("files API returns file content for valid path", async () => {
|
||||
test("files API returns file content for valid path", async (t) => {
|
||||
const { root, gsdDir, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const fileContent = "# State\n\nCurrent milestone: M001";
|
||||
writeFileSync(join(gsdDir, "STATE.md"), fileContent);
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files?path=STATE.md");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.equal(data.content, fileContent);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const fileContent = "# State\n\nCurrent milestone: M001";
|
||||
writeFileSync(join(gsdDir, "STATE.md"), fileContent);
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files?path=STATE.md");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.equal(data.content, fileContent);
|
||||
});
|
||||
|
||||
test("files API returns content for nested files", async () => {
|
||||
test("files API returns content for nested files", async (t) => {
|
||||
const { root, gsdDir, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const msDir = join(gsdDir, "milestones", "M001");
|
||||
mkdirSync(msDir, { recursive: true });
|
||||
writeFileSync(join(msDir, "M001-ROADMAP.md"), "# Roadmap content");
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=milestones/M001/M001-ROADMAP.md",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.equal(data.content, "# Roadmap content");
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const msDir = join(gsdDir, "milestones", "M001");
|
||||
mkdirSync(msDir, { recursive: true });
|
||||
writeFileSync(join(msDir, "M001-ROADMAP.md"), "# Roadmap content");
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=milestones/M001/M001-ROADMAP.md",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.equal(data.content, "# Roadmap content");
|
||||
});
|
||||
|
||||
// ─── Group 5: Files API — security: path traversal rejection ─────────
|
||||
test("files API rejects path traversal with ../", async () => {
|
||||
test("files API rejects path traversal with ../", async (t) => {
|
||||
const { root, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=../etc/passwd",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 400);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error, "Expected error message in response");
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=../etc/passwd",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 400);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error, "Expected error message in response");
|
||||
});
|
||||
|
||||
test("files API rejects absolute paths", async () => {
|
||||
test("files API rejects absolute paths", async (t) => {
|
||||
const { root, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=/etc/passwd",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 400);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=/etc/passwd",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 400);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error);
|
||||
});
|
||||
|
||||
test("files API returns 404 for missing files", async () => {
|
||||
test("files API returns 404 for missing files", async (t) => {
|
||||
const { root, cleanup } = makeGsdFixture();
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=nonexistent.md",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 404);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request(
|
||||
"http://localhost:3000/api/files?path=nonexistent.md",
|
||||
);
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 404);
|
||||
|
||||
const data = await response.json();
|
||||
assert.ok(data.error);
|
||||
});
|
||||
|
||||
test("files API returns empty tree when .gsd/ does not exist", async () => {
|
||||
test("files API returns empty tree when .gsd/ does not exist", async (t) => {
|
||||
const root = mkdtempSync(join(tmpdir(), "gsd-state-surfaces-empty-"));
|
||||
const origEnv = process.env.GSD_WEB_PROJECT_CWD;
|
||||
|
||||
try {
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.deepEqual(data.tree, []);
|
||||
} finally {
|
||||
t.after(() => {
|
||||
process.env.GSD_WEB_PROJECT_CWD = origEnv;
|
||||
rmSync(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
process.env.GSD_WEB_PROJECT_CWD = root;
|
||||
|
||||
const request = new Request("http://localhost:3000/api/files");
|
||||
const response = await filesRoute.GET(request);
|
||||
assert.equal(response.status, 200);
|
||||
|
||||
const data = await response.json();
|
||||
assert.deepEqual(data.tree, []);
|
||||
});
|
||||
|
||||
// ─── Group 6: Mock-free invariant — no static mock data ──────────────
|
||||
|
|
|
|||
|
|
@@ -29,7 +29,7 @@ test("derivePendingWorkflowCommandLabel falls back to the command type when no i
|
|||
assert.equal(label, "/abort")
|
||||
})
|
||||
|
||||
test("navigateToGSDView dispatches the shared browser navigation event", () => {
|
||||
test("navigateToGSDView dispatches the shared browser navigation event", (t) => {
|
||||
const originalWindow = (globalThis as { window?: EventTarget }).window
|
||||
const fakeWindow = new EventTarget()
|
||||
const seen: string[] = []
|
||||
|
|
@@ -40,16 +40,14 @@ test("navigateToGSDView dispatches the shared browser navigation event", () => {
|
|||
|
||||
;(globalThis as { window?: EventTarget }).window = fakeWindow
|
||||
|
||||
try {
|
||||
navigateToGSDView("power")
|
||||
} finally {
|
||||
;(globalThis as { window?: EventTarget }).window = originalWindow
|
||||
}
|
||||
t.after(() => { ;(globalThis as { window?: EventTarget }).window = originalWindow });
|
||||
|
||||
navigateToGSDView("power")
|
||||
|
||||
assert.deepEqual(seen, ["power"])
|
||||
})
|
||||
|
||||
test("executeWorkflowActionInPowerMode calls dispatch and navigates to the appropriate view", async () => {
|
||||
test("executeWorkflowActionInPowerMode calls dispatch and navigates to the appropriate view", async (t) => {
|
||||
const originalWindow = (globalThis as { window?: EventTarget }).window
|
||||
const originalLocalStorage = (globalThis as any).localStorage
|
||||
const fakeWindow = new EventTarget()
|
||||
|
|
@@ -63,18 +61,18 @@ test("executeWorkflowActionInPowerMode calls dispatch and navigates to the appro
|
|||
;(globalThis as { window?: EventTarget }).window = fakeWindow
|
||||
;(globalThis as any).localStorage = { getItem: () => null, setItem: () => {} }
|
||||
|
||||
try {
|
||||
executeWorkflowActionInPowerMode({
|
||||
dispatch: async () => {
|
||||
dispatchCalled = true
|
||||
},
|
||||
})
|
||||
// dispatch is fire-and-forget, give it a tick to resolve
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
} finally {
|
||||
t.after(() => {
|
||||
;(globalThis as { window?: EventTarget }).window = originalWindow
|
||||
;(globalThis as any).localStorage = originalLocalStorage
|
||||
}
|
||||
});
|
||||
|
||||
executeWorkflowActionInPowerMode({
|
||||
dispatch: async () => {
|
||||
dispatchCalled = true
|
||||
},
|
||||
})
|
||||
// dispatch is fire-and-forget, give it a tick to resolve
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
|
||||
assert.equal(dispatchCalled, true, "dispatch should have been called")
|
||||
assert.ok(seenViews.length > 0, "should navigate to a view")
|
||||
|
|
|
|||
|
|
@@ -51,20 +51,20 @@ test('renders cwd hint', () => {
|
|||
assert.ok(out.includes('/gsd to begin'), 'hint line missing')
|
||||
})
|
||||
|
||||
test('skips when not a TTY', () => {
|
||||
test('skips when not a TTY', (t) => {
|
||||
const chunks: string[] = []
|
||||
const original = process.stderr.write.bind(process.stderr)
|
||||
;(process.stderr as any).write = (chunk: string) => { chunks.push(chunk); return true }
|
||||
const origIsTTY = (process.stderr as any).isTTY
|
||||
;(process.stderr as any).isTTY = false
|
||||
|
||||
try {
|
||||
printWelcomeScreen({ version: '1.0.0' })
|
||||
assert.equal(chunks.join(''), '', 'should produce no output when not TTY')
|
} finally {
t.after(() => {
;(process.stderr as any).write = original
;(process.stderr as any).isTTY = origIsTTY
}
});

printWelcomeScreen({ version: '1.0.0' })
|
||||
assert.equal(chunks.join(''), '', 'should produce no output when not TTY')
|
||||
})
|
||||
|
||||
test('renders without model or provider', () => {