From 9ce6e02bd8e2c57568ad4cb861ee7488c6d378f9 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 03:33:20 +0100 Subject: [PATCH 01/27] fix: hydrate collected secrets for current session (#2788) secure_env_collect previously persisted secrets to their destination but left the running Node process unchanged. Extensions like Context7 read process.env directly, so newly collected keys did not work until restart. Hydrate process.env as soon as a secret is successfully applied, and cover the regression through collectSecretsFromManifest so the current session can use the key immediately. Closes #2685 --- .../extensions/get-secrets-from-user.ts | 8 ++++ .../gsd/tests/collect-from-manifest.test.ts | 39 +++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/src/resources/extensions/get-secrets-from-user.ts b/src/resources/extensions/get-secrets-from-user.ts index 9ff6cbb03..300852305 100644 --- a/src/resources/extensions/get-secrets-from-user.ts +++ b/src/resources/extensions/get-secrets-from-user.ts @@ -47,6 +47,12 @@ function shellEscapeSingle(value: string): string { return `'${value.replace(/'/g, `'\\''`)}'`; } +function hydrateProcessEnv(key: string, value: string): void { + // Make newly collected secrets immediately visible to the current session. + // Some extensions read process.env directly and do not reload .env on every call. + process.env[key] = value; +} + async function writeEnvKey(filePath: string, key: string, value: string): Promise { let content = ""; try { @@ -312,6 +318,7 @@ async function applySecrets( try { await writeEnvKey(opts.envFilePath, key, value); applied.push(key); + hydrateProcessEnv(key, value); } catch (err: any) { errors.push(`${key}: ${err.message}`); } @@ -330,6 +337,7 @@ async function applySecrets( errors.push(`${key}: ${result.stderr.slice(0, 200)}`); } else { applied.push(key); + hydrateProcessEnv(key, value); } } catch (err: any) { errors.push(`${key}: ${err.message}`); diff --git a/src/resources/extensions/gsd/tests/collect-from-manifest.test.ts b/src/resources/extensions/gsd/tests/collect-from-manifest.test.ts index c0a62946f..9ca2eecd9 100644 --- a/src/resources/extensions/gsd/tests/collect-from-manifest.test.ts +++ b/src/resources/extensions/gsd/tests/collect-from-manifest.test.ts @@ -227,6 +227,45 @@ test("collectSecretsFromManifest: manifest statuses are updated after collection "KEY_TO_SKIP should have status 'skipped' after user skipped it"); }); +test("collectSecretsFromManifest: applied keys hydrate process.env for the running session", async (t) => { + const { collectSecretsFromManifest } = await loadOrchestrator(); + + const tmp = makeTempDir("manifest-live-env"); + const envKey = "CONTEXT7_API_KEY"; + const saved = process.env[envKey]; + t.after(() => { + if (saved === undefined) delete process.env[envKey]; + else process.env[envKey] = saved; + rmSync(tmp, { recursive: true, force: true }); + }); + + delete process.env[envKey]; + + const manifest = makeManifest([ + { key: envKey, status: "pending" }, + ]); + await writeManifestFile(tmp, manifest); + + let callIndex = 0; + const mockCtx = { + cwd: tmp, + hasUI: true, + ui: { + custom: async (_factory: any) => { + callIndex++; + if (callIndex <= 1) return null; // summary screen dismiss + return "c7_live_test_key"; + }, + }, + }; + + const result = await collectSecretsFromManifest(tmp, "M001", mockCtx as any); + + assert.ok(result.applied.includes(envKey), "CONTEXT7_API_KEY should be applied"); + assert.equal(process.env[envKey], "c7_live_test_key", + "applied keys should be available through process.env without restarting"); +}); + // ─── showSecretsSummary: render output ──────────────────────────────────────── test("showSecretsSummary: produces lines with correct status glyphs for each entry status", async () => { From 112090706e2b6a5553d6eba208de08bc410d4a74 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 27 Mar 2026 02:50:02 +0000 Subject: [PATCH 02/27] release: v2.53.0 --- CHANGELOG.md | 26 ++++++++++++++++++++++++- native/npm/darwin-arm64/package.json | 2 +- native/npm/darwin-x64/package.json | 2 +- native/npm/linux-arm64-gnu/package.json | 2 +- native/npm/linux-x64-gnu/package.json | 2 +- native/npm/win32-x64-msvc/package.json | 2 +- package.json | 2 +- packages/pi-coding-agent/package.json | 2 +- pkg/package.json | 2 +- 9 files changed, 33 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34aea54cc..2cf19d8e7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,29 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +## [2.53.0] - 2026-03-27 + +### Added +- **vscode**: activity feed, workflow controls, session forking, enhanced code lens [2/3] (#2656) +- **gsd**: enable safety mechanisms by default (snapshots, pre-merge checks) (#2678) + +### Fixed +- hydrate collected secrets for current session (#2788) +- resolve stash pop conflicts and stop swallowing merge errors (#2780) +- treat any extracted verdict as terminal in isValidationTerminal (#2774) +- use localStorage for auth token to enable multi-tab usage (#2785) +- guard activeMilestone.id access in discuss and headless paths (#2776) +- clean up zombie parallel workers stuck in error state (#2782) +- relax milestone validation gate to accept prose evidence (#2779) +- write milestone reports to project root instead of worktree (#2778) +- auto-resolve build artifact conflicts in milestone merge (#2777) +- let rate-limit errors attempt model fallback before pausing (#2775) +- prevent gsd next from self-killing via stale crash lock (#2784) +- add shell flag for Windows spawn in VSCode extension (#2781) + +### Changed +- **gsd**: extract duplicated status guards and validation helpers (#2767) + ## [2.52.0] - 2026-03-27 ### Added @@ -2050,7 +2073,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ### Changed - License updated to MIT -[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...HEAD +[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...HEAD +[2.53.0]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...v2.53.0 [2.52.0]: https://github.com/gsd-build/gsd-2/compare/v2.51.0...v2.52.0 [2.51.0]: https://github.com/gsd-build/gsd-2/compare/v2.50.0...v2.51.0 [2.50.0]: https://github.com/gsd-build/gsd-2/compare/v2.49.0...v2.50.0 diff --git a/native/npm/darwin-arm64/package.json b/native/npm/darwin-arm64/package.json index 223e08bd2..0911fe2e5 100644 --- a/native/npm/darwin-arm64/package.json +++ b/native/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-arm64", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD native engine binary for macOS ARM64", "os": [ "darwin" diff --git a/native/npm/darwin-x64/package.json b/native/npm/darwin-x64/package.json index 66de49e14..12d435d25 100644 --- a/native/npm/darwin-x64/package.json +++ b/native/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-x64", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD native engine binary for macOS Intel", "os": [ "darwin" diff --git a/native/npm/linux-arm64-gnu/package.json b/native/npm/linux-arm64-gnu/package.json index 39a0377ea..ec4d8e9c7 100644 --- a/native/npm/linux-arm64-gnu/package.json +++ b/native/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-arm64-gnu", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD native engine binary for Linux ARM64 (glibc)", "os": [ "linux" diff --git a/native/npm/linux-x64-gnu/package.json b/native/npm/linux-x64-gnu/package.json index 9db49f48d..60a9b25bc 100644 --- a/native/npm/linux-x64-gnu/package.json +++ b/native/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-x64-gnu", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD native engine binary for Linux x64 (glibc)", "os": [ "linux" diff --git a/native/npm/win32-x64-msvc/package.json b/native/npm/win32-x64-msvc/package.json index 09e714247..02311ed7d 100644 --- a/native/npm/win32-x64-msvc/package.json +++ b/native/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-win32-x64-msvc", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD native engine binary for Windows x64 (MSVC)", "os": [ "win32" diff --git a/package.json b/package.json index 98c4e5977..6ce995ad9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "gsd-pi", - "version": "2.52.0", + "version": "2.53.0", "description": "GSD — Get Shit Done coding agent", "license": "MIT", "repository": { diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json index 7d3cb624e..019803620 100644 --- a/packages/pi-coding-agent/package.json +++ b/packages/pi-coding-agent/package.json @@ -1,6 +1,6 @@ { "name": "@gsd/pi-coding-agent", - "version": "2.52.0", + "version": "2.53.0", "description": "Coding agent CLI (vendored from pi-mono)", "type": "module", "piConfig": { diff --git a/pkg/package.json b/pkg/package.json index 6921ef3fc..7457973b7 100644 --- a/pkg/package.json +++ b/pkg/package.json @@ -1,6 +1,6 @@ { "name": "@glittercowboy/gsd", - "version": "2.52.0", + "version": "2.53.0", "piConfig": { "name": "gsd", "configDir": ".gsd" From 5abf9652b3056d60dda92b4c5da685406eb3f4c7 Mon Sep 17 00:00:00 2001 From: Jordan Gaytan <48812427+astrogopher@users.noreply.github.com> Date: Thu, 26 Mar 2026 23:22:45 -0500 Subject: [PATCH 03/27] feat(parallel): add real-time TUI monitor dashboard with self-healing (#2799) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Zero-dependency Node.js script for monitoring parallel GSD auto-mode workers. Shows per-worker health, progress, cost, and current unit in a live-refreshing terminal dashboard. Reads from existing status.json, auto.lock, and worktree SQLite DBs — fully read-only by default. Features: - Live dashboard with per-worker panels (health dot, phase, slice/task progress bars) - Event feed showing task completions from all workers - Cost tracking with NDJSON fallback for respawned workers - Stale-error suppression (only shows errors from current session) - Auto-detect GSD loader path across install methods - --heal flag: opt-in self-healing that respawns dead workers with cooldown and retry limits - --once flag: single snapshot mode for scripting Complements the existing parallel-orchestrator.ts — this is a passive monitoring tool that reads the same status files the orchestrator writes. --- scripts/parallel-monitor.mjs | 852 +++++++++++++++++++++++++++++++++++ 1 file changed, 852 insertions(+) create mode 100755 scripts/parallel-monitor.mjs diff --git a/scripts/parallel-monitor.mjs b/scripts/parallel-monitor.mjs new file mode 100755 index 000000000..b29109682 --- /dev/null +++ b/scripts/parallel-monitor.mjs @@ -0,0 +1,852 @@ +#!/usr/bin/env node +/** + * GSD Parallel Worker Monitor + * + * Real-time TUI dashboard for monitoring parallel GSD auto-mode workers. + * Zero dependencies — uses raw ANSI escape codes, Node.js builtins only. + * + * Usage: + * node scripts/parallel-monitor.mjs # live dashboard, 5s refresh + * node scripts/parallel-monitor.mjs --interval 3 # faster refresh + * node scripts/parallel-monitor.mjs --once # single snapshot, then exit + * node scripts/parallel-monitor.mjs --heal # auto-respawn dead workers + * node scripts/parallel-monitor.mjs --heal --heal-retries 5 --heal-cooldown 60 + * + * Options: + * --interval Refresh interval in seconds (default: 5) + * --once Render once and exit (useful for scripting/piping) + * --heal Auto-respawn dead workers (opt-in, off by default) + * --heal-retries Max respawn attempts per worker (default: 3) + * --heal-cooldown Seconds between respawn attempts (default: 30) + * --dir Status file directory (default: .gsd/parallel) + * --root Project root (default: cwd) + * + * Data sources: + * .gsd/parallel/M0xx.status.json — heartbeat, cost, state (written by orchestrator) + * .gsd/worktrees/M0xx/.gsd/auto.lock — current unit type + ID (written by worker) + * .gsd/worktrees/M0xx/.gsd/gsd.db — task/slice completion (SQLite, queried via cli) + * .gsd/parallel/M0xx.stdout.log — NDJSON events (cost extraction, notify messages) + * .gsd/parallel/M0xx.stderr.log — error surfacing + * + * Health indicators: + * ● green — PID alive, fresh heartbeat (<30s) + * ● green — PID alive, heartbeat stale (respawned worker, file mtime used as proxy) + * ○ red — PID dead + * + * Self-healing (--heal): + * When a dead worker is detected, the monitor writes a temp shell script and launches + * a new headless auto-mode process in the worker's worktree with the correct env vars. + * Cooldown prevents rapid respawn loops. Gives up after --heal-retries consecutive + * failures. Resets retry count when a worker comes back alive. + */ + +import fs from 'node:fs'; +import path from 'node:path'; +import { execSync } from 'node:child_process'; + +// ─── Configuration ─────────────────────────────────────────────────────────── + +const args = process.argv.slice(2); +const INTERVAL_SEC = parseInt(getArg('--interval', '5'), 10); +const PARALLEL_DIR = getArg('--dir', '.gsd/parallel'); +const PROJECT_ROOT = getArg('--root', process.cwd()); +const ONE_SHOT = args.includes('--once'); +const HEAL_MODE = args.includes('--heal'); +const HEAL_MAX_RETRIES = parseInt(getArg('--heal-retries', '3'), 10); +const HEAL_COOLDOWN_SEC = parseInt(getArg('--heal-cooldown', '30'), 10); + +// Per-worker heal state: { lastAttempt: number, retries: number } +const healState = {}; + +function getArg(flag, defaultVal) { + const idx = args.indexOf(flag); + return idx !== -1 && args[idx + 1] ? args[idx + 1] : defaultVal; +} + +// ─── ANSI Helpers ──────────────────────────────────────────────────────────── + +const ESC = '\x1b['; +const RESET = `${ESC}0m`; +const BOLD = `${ESC}1m`; +const DIM = `${ESC}2m`; +const ITALIC = `${ESC}3m`; + +const FG = { + black: `${ESC}30m`, + red: `${ESC}31m`, + green: `${ESC}32m`, + yellow: `${ESC}33m`, + blue: `${ESC}34m`, + magenta: `${ESC}35m`, + cyan: `${ESC}36m`, + white: `${ESC}37m`, + gray: `${ESC}90m`, +}; + +const BG = { + black: `${ESC}40m`, + red: `${ESC}41m`, + green: `${ESC}42m`, + yellow: `${ESC}43m`, + blue: `${ESC}44m`, + white: `${ESC}47m`, +}; + +// Screen control +const CLEAR_SCREEN = `${ESC}2J${ESC}H`; +const HIDE_CURSOR = `${ESC}?25l`; +const SHOW_CURSOR = `${ESC}?25h`; +const SAVE_POS = `${ESC}s`; +const RESTORE_POS = `${ESC}u`; + +function moveTo(row, col) { return `${ESC}${row};${col}H`; } + +// ─── Data Reading ──────────────────────────────────────────────────────────── + +function readJsonSafe(filePath) { + try { + return JSON.parse(fs.readFileSync(filePath, 'utf-8')); + } catch { + return null; + } +} + +function isPidAlive(pid) { + try { + process.kill(pid, 0); + return true; + } catch { + return false; + } +} + +function discoverWorkers() { + const dir = path.resolve(PROJECT_ROOT, PARALLEL_DIR); + const worktreeDir = path.resolve(PROJECT_ROOT, '.gsd/worktrees'); + const mids = new Set(); + + // From status files + if (fs.existsSync(dir)) { + for (const f of fs.readdirSync(dir)) { + if (f.endsWith('.status.json')) mids.add(f.replace('.status.json', '')); + } + } + + // From stderr/stdout logs (manually respawned workers may lack status.json) + if (fs.existsSync(dir)) { + for (const f of fs.readdirSync(dir)) { + const m = f.match(/^(M\d+)\.(stderr|stdout)\.log$/); + if (m) mids.add(m[1]); + } + } + + // From worktree directories that have auto.lock (actively running) + if (fs.existsSync(worktreeDir)) { + for (const d of fs.readdirSync(worktreeDir)) { + if (d.startsWith('M') && fs.existsSync(path.join(worktreeDir, d, '.gsd', 'auto.lock'))) { + mids.add(d); + } + } + } + + return [...mids].sort(); +} + +function readWorkerStatus(mid) { + const statusPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.status.json`); + return readJsonSafe(statusPath); +} + +function readAutoLock(mid) { + const lockPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/auto.lock`); + return readJsonSafe(lockPath); +} + +function querySliceProgress(mid) { + const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`); + if (!fs.existsSync(dbPath)) return []; + + try { + const sql = `SELECT s.id, s.status, COUNT(t.id), SUM(CASE WHEN t.status='complete' THEN 1 ELSE 0 END) FROM slices s LEFT JOIN tasks t ON s.milestone_id=t.milestone_id AND s.id=t.slice_id WHERE s.milestone_id='${mid}' GROUP BY s.id ORDER BY s.id`; + const out = execSync(`sqlite3 "${dbPath}" "${sql}"`, { timeout: 3000, encoding: 'utf-8' }).trim(); + if (!out) return []; + return out.split('\n').map(line => { + const [id, status, total, done] = line.split('|'); + return { id, status, total: parseInt(total, 10), done: parseInt(done || '0', 10) }; + }); + } catch { + return []; + } +} + +function readRecentEvents(mid, maxLines = 5) { + const stdoutPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`); + const notifications = []; + const errors = []; + + // Parse NDJSON notify events from stdout log + if (fs.existsSync(stdoutPath)) { + try { + const stat = fs.statSync(stdoutPath); + const readSize = Math.min(stat.size, 32768); + const fd = fs.openSync(stdoutPath, 'r'); + const buf = Buffer.alloc(readSize); + fs.readSync(fd, buf, 0, readSize, Math.max(0, stat.size - readSize)); + fs.closeSync(fd); + const content = buf.toString('utf-8'); + const lines = content.trim().split('\n').slice(-100); + + for (const line of lines) { + try { + const obj = JSON.parse(line); + if (obj.method === 'notify' && obj.message) { + notifications.push({ ts: Date.now(), msg: obj.message, mid }); + } + } catch { /* skip */ } + } + } catch { /* skip */ } + } + + // Parse errors from stderr log — only new bytes since monitor started + const stderrPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`); + if (fs.existsSync(stderrPath)) { + try { + const stat = fs.statSync(stderrPath); + + // Record baseline on first read — skip pre-existing errors + if (!(mid in stderrBaselines)) { + stderrBaselines[mid] = stat.size; + } + + const baseline = stderrBaselines[mid]; + const newBytes = stat.size - baseline; + + if (newBytes > 0) { + const readSize = Math.min(newBytes, 4096); + const fd = fs.openSync(stderrPath, 'r'); + const buf = Buffer.alloc(readSize); + fs.readSync(fd, buf, 0, readSize, Math.max(baseline, stat.size - readSize)); + fs.closeSync(fd); + const content = buf.toString('utf-8'); + const lines = content.trim().split('\n').slice(-10); + + for (const line of lines) { + if (line.includes('error') || line.includes('Error') || line.includes('WARN') || line.includes('exited')) { + errors.push({ ts: Date.now(), msg: line.trim(), mid, isError: true }); + } + } + } + } catch { /* skip */ } + } + + return { + notifications: notifications.slice(-maxLines), + errors: errors.slice(-3), + }; +} + +/** + * Extract accumulated cost from NDJSON stdout log (fallback when status.json is missing). + * Sums `message.usage.cost.total` from all `message_end` events. + */ +function extractCostFromNdjson(mid) { + const stdoutPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`); + if (!fs.existsSync(stdoutPath)) return 0; + + try { + const content = fs.readFileSync(stdoutPath, 'utf-8'); + let total = 0; + for (const line of content.split('\n')) { + if (!line.includes('message_end')) continue; + try { + const obj = JSON.parse(line); + if (obj.type === 'message_end') { + const cost = obj.message?.usage?.cost?.total; + if (typeof cost === 'number') total += cost; + } + } catch { /* skip */ } + } + return total; + } catch { + return 0; + } +} + +// ─── Self-Healing ──────────────────────────────────────────────────────────── + +// Auto-detect the GSD loader path — works across npm global, homebrew, and local installs +function findGsdLoader() { + // 1. Check if we're running from inside the gsd-2 repo itself + const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js'); + if (fs.existsSync(repoLoader)) return repoLoader; + + // 2. Check common global install locations + try { + const globalRoot = execSync('npm root -g', { encoding: 'utf-8', timeout: 3000 }).trim(); + const candidates = [ + path.join(globalRoot, 'gsd-pi', 'dist', 'loader.js'), + path.join(globalRoot, '@gsd', 'pi', 'dist', 'loader.js'), + ]; + for (const c of candidates) { + if (fs.existsSync(c)) return c; + } + } catch { /* skip */ } + + // 3. Try `which gsd` and resolve symlink + try { + const bin = execSync('which gsd', { encoding: 'utf-8', timeout: 3000 }).trim(); + if (bin) { + const realBin = fs.realpathSync(bin); + const loader = path.resolve(path.dirname(realBin), '..', 'dist', 'loader.js'); + if (fs.existsSync(loader)) return loader; + } + } catch { /* skip */ } + + return null; +} + +const GSD_LOADER = findGsdLoader(); + +/** + * Respawn a dead worker. Returns the new PID or null on failure. + * Uses nohup + output redirection so the child is fully detached. + */ +function respawnWorker(mid) { + const worktreeDir = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}`); + if (!fs.existsSync(worktreeDir)) return null; + if (!fs.existsSync(GSD_LOADER)) return null; + + const stdoutLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`); + const stderrLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`); + + try { + const env = [ + `GSD_MILESTONE_LOCK=${mid}`, + `GSD_PROJECT_ROOT=${PROJECT_ROOT}`, + `GSD_PARALLEL_WORKER=1`, + ].join(' '); + + // Use a shell script written to a temp file to avoid quoting hell + const script = [ + '#!/bin/bash', + `cd "${worktreeDir}"`, + `export GSD_MILESTONE_LOCK=${mid}`, + `export GSD_PROJECT_ROOT="${PROJECT_ROOT}"`, + `export GSD_PARALLEL_WORKER=1`, + `exec node "${GSD_LOADER}" headless --json auto > "${stdoutLog}" 2>> "${stderrLog}"`, + ].join('\n'); + + const scriptPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.respawn.sh`); + fs.writeFileSync(scriptPath, script, { mode: 0o755 }); + + // Launch detached via nohup + const result = execSync( + `nohup bash "${scriptPath}" > /dev/null 2>&1 & echo $!`, + { timeout: 5000, encoding: 'utf-8', cwd: worktreeDir } + ).trim(); + + // Clean up the temp script after a delay (process already forked) + setTimeout(() => { + try { fs.unlinkSync(scriptPath); } catch {} + }, 5000); + + const newPid = parseInt(result, 10); + return isNaN(newPid) ? null : newPid; + } catch (err) { + return null; + } +} + +/** + * Check all workers and respawn dead ones if --heal is active. + * Returns an array of heal events for the event feed. + */ +function healWorkers(workers) { + if (!HEAL_MODE) return []; + + const events = []; + const now = Date.now(); + + for (const wk of workers) { + if (wk.alive) { + // Worker is alive — reset its heal state on success + if (healState[wk.mid]) { + healState[wk.mid].retries = 0; + } + continue; + } + + // Worker is dead — check if we should attempt a respawn + if (!healState[wk.mid]) { + healState[wk.mid] = { lastAttempt: 0, retries: 0 }; + } + + const hs = healState[wk.mid]; + + // Give up after max retries + if (hs.retries >= HEAL_MAX_RETRIES) { + if (hs.retries === HEAL_MAX_RETRIES) { + events.push({ + ts: now, mid: wk.mid, + msg: `⛔ ${wk.mid}: gave up after ${HEAL_MAX_RETRIES} respawn attempts` + }); + hs.retries++; // Increment past max so this message only shows once + } + continue; + } + + // Cooldown — don't respawn too quickly + const elapsed = now - hs.lastAttempt; + if (elapsed < HEAL_COOLDOWN_SEC * 1000) { + const remaining = Math.ceil((HEAL_COOLDOWN_SEC * 1000 - elapsed) / 1000); + // Don't spam the feed — only note on first cooldown tick + continue; + } + + // Check the milestone isn't already complete + const allSlicesDone = wk.slices.length > 0 && wk.slices.every(s => s.status === 'complete'); + if (allSlicesDone) { + events.push({ ts: now, mid: wk.mid, msg: `✅ ${wk.mid}: all slices complete, no respawn needed` }); + hs.retries = HEAL_MAX_RETRIES + 1; // Don't try again + continue; + } + + // Attempt respawn + hs.lastAttempt = now; + hs.retries++; + + events.push({ + ts: now, mid: wk.mid, + msg: `🔄 ${wk.mid}: respawning (attempt ${hs.retries}/${HEAL_MAX_RETRIES})...` + }); + + const newPid = respawnWorker(wk.mid); + + if (newPid) { + events.push({ + ts: now, mid: wk.mid, + msg: `🟢 ${wk.mid}: respawned as PID ${newPid}` + }); + // Reset stderr baseline so we don't show old errors + delete stderrBaselines[wk.mid]; + } else { + events.push({ + ts: now, mid: wk.mid, isError: true, + msg: `❌ ${wk.mid}: respawn failed` + }); + } + } + + return events; +} + +// ─── Formatting Helpers ────────────────────────────────────────────────────── + +function formatDuration(ms) { + if (!ms || ms < 0) return '--:--'; + const totalSec = Math.floor(ms / 1000); + const h = Math.floor(totalSec / 3600); + const m = Math.floor((totalSec % 3600) / 60); + const s = totalSec % 60; + if (h > 0) return `${h}h${String(m).padStart(2, '0')}m`; + return `${String(m).padStart(2, '0')}m${String(s).padStart(2, '0')}s`; +} + +function formatCost(cost) { + if (cost == null) return '$-.--'; + return `$${cost.toFixed(2)}`; +} + +function healthColor(heartbeatAge, alive) { + if (!alive) return 'red'; + // PID alive is the strongest signal — worker is running + if (heartbeatAge < 30000) return 'green'; + // Alive but stale heartbeat — either respawned (no orchestrator writing status.json) + // or potentially stuck. Show green since headless idle timeout (120s) kills stuck workers. + if (alive) return 'green'; + return 'red'; +} + +function healthIcon(color) { + switch (color) { + case 'green': return '●'; + case 'yellow': return '◐'; + case 'red': return '○'; + default: return '?'; + } +} + +function unitTypeLabel(unitType) { + const labels = { + 'execute-task': 'EXEC', + 'research-slice': 'RSRCH', + 'plan-slice': 'PLAN', + 'complete-slice': 'DONE', + 'complete-task': 'DONE', + 'reassess': 'ASSESS', + 'validate': 'VALID', + }; + return labels[unitType] || (unitType || '---').toUpperCase().slice(0, 5); +} + +function progressBar(done, total, width = 20) { + if (total === 0) return `${'░'.repeat(width)}`; + const filled = Math.round((done / total) * width); + const empty = width - filled; + return `${'█'.repeat(filled)}${'░'.repeat(empty)}`; +} + +function pad(str, width) { + const s = String(str); + return s.length >= width ? s.slice(0, width) : s + ' '.repeat(width - s.length); +} + +function rpad(str, width) { + const s = String(str); + return s.length >= width ? s.slice(0, width) : ' '.repeat(width - s.length) + s; +} + +function truncate(str, maxLen) { + if (str.length <= maxLen) return str; + return str.slice(0, maxLen - 1) + '…'; +} + +/** + * Get recently completed tasks/slices from the worktree DB for the event feed. + */ +function queryRecentCompletions(mid) { + const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`); + if (!fs.existsSync(dbPath)) return []; + + try { + // Completed tasks with timestamps, most recent first + const sql = `SELECT id, slice_id, one_liner, completed_at FROM tasks WHERE milestone_id='${mid}' AND status='complete' AND completed_at IS NOT NULL ORDER BY completed_at DESC LIMIT 5`; + const out = execSync(`sqlite3 "${dbPath}" "${sql}"`, { timeout: 3000, encoding: 'utf-8' }).trim(); + if (!out) return []; + return out.split('\n').map(line => { + const [taskId, sliceId, oneLiner, completedAt] = line.split('|'); + return { + ts: completedAt ? new Date(completedAt).getTime() : Date.now(), + msg: `✓ ${mid}/${sliceId}/${taskId}${oneLiner ? ': ' + oneLiner : ''}`, + mid, + }; + }); + } catch { + return []; + } +} + +// ─── Rendering ─────────────────────────────────────────────────────────────── + +const COLS = Math.max(process.stdout.columns || 100, 80); +const ROWS = Math.max(process.stdout.rows || 40, 20); + +let lastEventFeed = []; // Persisted across renders +const stderrBaselines = {}; // mid → file size at monitor startup (skip pre-existing errors) + +function collectWorkerData() { + const mids = discoverWorkers(); + const workers = []; + + for (const mid of mids) { + const status = readWorkerStatus(mid); + const lock = readAutoLock(mid); + const slices = querySliceProgress(mid); + const { notifications, errors } = readRecentEvents(mid, 3); + + // Prefer auto.lock PID (written by the running worker) over status.json PID + // (written by the orchestrator, stale after respawn) + const pid = lock?.pid || status?.pid; + const alive = pid ? isPidAlive(pid) : false; + // Heartbeat: prefer status.json if its PID matches (orchestrator-managed), + // otherwise fall back to stdout.log mtime (respawned workers write NDJSON continuously) + let heartbeatAge = Infinity; + const statusPidMatches = status?.pid && status.pid === pid; + if (status?.lastHeartbeat && statusPidMatches) { + heartbeatAge = Date.now() - status.lastHeartbeat; + } else { + // Check stdout/stderr log mtime as proxy heartbeat + const stdoutLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`); + const stderrLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`); + try { + const mtimes = []; + if (fs.existsSync(stdoutLog)) mtimes.push(fs.statSync(stdoutLog).mtimeMs); + if (fs.existsSync(stderrLog)) mtimes.push(fs.statSync(stderrLog).mtimeMs); + if (lock?.unitStartedAt) mtimes.push(new Date(lock.unitStartedAt).getTime()); + if (mtimes.length > 0) heartbeatAge = Date.now() - Math.max(...mtimes); + } catch { /* skip */ } + } + + // Cost: prefer status.json, fall back to NDJSON log parsing + let cost = status?.cost || 0; + if (cost === 0) { + cost = extractCostFromNdjson(mid); + } + + const totalTasks = slices.reduce((sum, s) => sum + s.total, 0); + const doneTasks = slices.reduce((sum, s) => sum + s.done, 0); + const doneSlices = slices.filter(s => s.status === 'complete').length; + const totalSlices = slices.length; + + // Current unit from auto.lock (more accurate than status.json currentUnit) + const currentUnit = lock?.unitId || status?.currentUnit || null; + const unitType = lock?.unitType || null; + const unitStarted = lock?.unitStartedAt ? new Date(lock.unitStartedAt).getTime() : null; + + // If no lock and worker is dead, show nothing (not a misleading "START" label) + const showUnit = currentUnit || (alive ? null : null); + + const elapsed = status?.startedAt + ? Date.now() - status.startedAt + : (lock?.startedAt ? Date.now() - new Date(lock.startedAt).getTime() : 0); + + workers.push({ + mid, + pid, + alive, + state: alive ? 'running' : (status?.state || 'dead'), + cost, + heartbeatAge, + health: healthColor(heartbeatAge, alive), + currentUnit, + unitType, + unitElapsed: unitStarted ? Date.now() - unitStarted : 0, + elapsed, + totalTasks, + doneTasks, + totalSlices, + doneSlices, + slices, + notifications, + errors, + }); + } + + return workers; +} + +function render(workers) { + const buf = []; + const w = COLS; + + // ── Header ── + buf.push(''); + const title = ' GSD Parallel Monitor '; + const titlePad = Math.max(0, Math.floor((w - title.length) / 2)); + buf.push( + `${' '.repeat(titlePad)}${BOLD}${BG.blue}${FG.white}${title}${RESET}` + ); + + const now = new Date().toLocaleTimeString(); + const totalCost = workers.reduce((s, w) => s + w.cost, 0); + const aliveCount = workers.filter(w => w.alive).length; + + const healTag = HEAL_MODE ? ` │ ${FG.green}⚕ heal${RESET}${DIM}` : ''; + buf.push( + `${DIM} ${now} │ ${aliveCount}/${workers.length} alive │ Total: ${RESET}${BOLD}${formatCost(totalCost)}${RESET}${DIM} │ Refresh: ${INTERVAL_SEC}s${healTag}${RESET}` + ); + buf.push(`${DIM}${'─'.repeat(w)}${RESET}`); + + // ── Worker Panels ── + if (workers.length === 0) { + buf.push(''); + buf.push(` ${FG.yellow}No workers found in ${PARALLEL_DIR}/${RESET}`); + buf.push(` ${DIM}Waiting for .gsd/parallel/*.status.json files...${RESET}`); + } else { + for (const wk of workers) { + buf.push(''); + + // Worker header: milestone ID + health + state + const icon = healthIcon(wk.health); + const hc = FG[wk.health]; + const stateLabel = wk.alive + ? (wk.state === 'running' ? `${FG.green}RUNNING${RESET}` : `${FG.yellow}${wk.state.toUpperCase()}${RESET}`) + : `${FG.red}${BOLD}DEAD${RESET}`; + + const heartbeatText = wk.heartbeatAge === Infinity + ? 'never' + : formatDuration(wk.heartbeatAge) + ' ago'; + + buf.push( + ` ${hc}${icon}${RESET} ${BOLD}${wk.mid}${RESET} ${stateLabel} ${DIM}PID ${wk.pid || '?'}${RESET} ${DIM}│${RESET} ${DIM}elapsed${RESET} ${formatDuration(wk.elapsed)} ${DIM}│${RESET} ${DIM}cost${RESET} ${BOLD}${formatCost(wk.cost)}${RESET} ${DIM}│${RESET} ${DIM}heartbeat${RESET} ${hc}${heartbeatText}${RESET}` + ); + + // Current unit + if (wk.currentUnit) { + const phaseColor = wk.unitType === 'execute-task' ? FG.cyan + : wk.unitType === 'research-slice' ? FG.magenta + : wk.unitType === 'plan-slice' ? FG.blue + : wk.unitType?.includes('complete') ? FG.green + : FG.white; + + buf.push( + ` ${DIM}▸${RESET} ${phaseColor}${unitTypeLabel(wk.unitType)}${RESET} ${wk.currentUnit} ${DIM}(${formatDuration(wk.unitElapsed)})${RESET}` + ); + } else if (!wk.alive) { + buf.push(` ${DIM}▸ ${FG.red}stopped${RESET}`); + } else { + buf.push(` ${DIM}▸ idle / between units${RESET}`); + } + + // Slice progress grid + if (wk.slices.length > 0) { + const sliceChips = wk.slices.map(s => { + const pct = s.total > 0 ? s.done / s.total : 0; + let color; + if (s.status === 'complete') color = FG.green; + else if (pct > 0) color = FG.yellow; + else color = FG.gray; + + const label = `${s.id}:${s.done}/${s.total}`; + return `${color}${label}${RESET}`; + }); + + buf.push(` ${DIM}slices${RESET} ${sliceChips.join(' ')}`); + + // Overall progress bar + const bar = progressBar(wk.doneTasks, wk.totalTasks, 30); + const pctStr = wk.totalTasks > 0 + ? `${Math.round((wk.doneTasks / wk.totalTasks) * 100)}%` + : '0%'; + buf.push( + ` ${DIM}tasks${RESET} ${FG.green}${bar}${RESET} ${wk.doneTasks}/${wk.totalTasks} ${DIM}(${pctStr})${RESET} ${DIM}│${RESET} ${DIM}slices done${RESET} ${wk.doneSlices}/${wk.totalSlices}` + ); + } + + // Recent errors from this worker + if (wk.errors.length > 0) { + for (const err of wk.errors.slice(-2)) { + buf.push(` ${FG.red}⚠ ${truncate(err.msg, w - 10)}${RESET}`); + } + } + } + } + + // ── Separator ── + buf.push(''); + buf.push(`${DIM}${'─'.repeat(w)}${RESET}`); + + // ── Event Feed ── + buf.push(` ${BOLD}Recent Events${RESET}`); + + // Collect new notification events from all workers + for (const wk of workers) { + for (const evt of wk.notifications) { + if (!lastEventFeed.some(e => e.msg === evt.msg && e.mid === evt.mid)) { + lastEventFeed.push(evt); + } + } + } + + // Also add recent task completions from the DB + for (const wk of workers) { + const completions = queryRecentCompletions(wk.mid); + for (const evt of completions) { + if (!lastEventFeed.some(e => e.msg === evt.msg)) { + lastEventFeed.push(evt); + } + } + } + + // Sort by timestamp and keep last 10 + lastEventFeed.sort((a, b) => a.ts - b.ts); + lastEventFeed = lastEventFeed.slice(-10); + + if (lastEventFeed.length === 0) { + buf.push(` ${DIM}No events yet...${RESET}`); + } else { + for (const evt of lastEventFeed.slice(-6)) { + const midTag = `${FG.cyan}${evt.mid}${RESET}`; + buf.push(` ${DIM}│${RESET} ${midTag} ${truncate(evt.msg, w - 12)}`); + } + } + + // ── Completion Check ── + const allDone = workers.length > 0 && workers.every(w => !w.alive); + if (allDone) { + buf.push(''); + buf.push(`${DIM}${'─'.repeat(w)}${RESET}`); + buf.push(''); + const doneMsg = ' ALL WORKERS COMPLETE '; + const donePad = Math.max(0, Math.floor((w - doneMsg.length) / 2)); + buf.push( + `${' '.repeat(donePad)}${BOLD}${BG.green}${FG.black}${doneMsg}${RESET}` + ); + buf.push(''); + for (const wk of workers) { + buf.push(` ${wk.mid} ${formatCost(wk.cost)} ${DIM}│${RESET} ${wk.doneSlices}/${wk.totalSlices} slices ${wk.doneTasks}/${wk.totalTasks} tasks ${DIM}│${RESET} ${formatDuration(wk.elapsed)}`); + } + const totalCostFinal = workers.reduce((s, w) => s + w.cost, 0); + buf.push(` ${BOLD}Total: ${formatCost(totalCostFinal)}${RESET}`); + } + + // ── Footer ── + buf.push(''); + const healInfo = HEAL_MODE + ? ` │ heal: ${HEAL_COOLDOWN_SEC}s cooldown, ${HEAL_MAX_RETRIES} max retries` + : ''; + buf.push(` ${DIM}Ctrl+C to exit${allDone ? ' (monitoring stopped)' : ''}${healInfo}${RESET}`); + + // Write to screen + process.stdout.write(CLEAR_SCREEN); + process.stdout.write(buf.join('\n') + '\n'); + + return allDone; +} + +// ─── Main Loop ─────────────────────────────────────────────────────────────── + +function main() { + process.stdout.write(HIDE_CURSOR); + + // Handle resize + process.stdout.on('resize', () => { + // COLS/ROWS are recalculated on next render + }); + + // Graceful exit + const cleanup = () => { + process.stdout.write(SHOW_CURSOR); + process.stdout.write(CLEAR_SCREEN); + console.log('Monitor stopped.'); + process.exit(0); + }; + + process.on('SIGINT', cleanup); + process.on('SIGTERM', cleanup); + + // Initial render + const workers = collectWorkerData(); + const healEvents = healWorkers(workers); + for (const evt of healEvents) lastEventFeed.push(evt); + let done = render(workers); + + if (done || ONE_SHOT) { + process.stdout.write(SHOW_CURSOR); + return; + } + + // Refresh loop + const timer = setInterval(() => { + try { + const workers = collectWorkerData(); + const healEvents = healWorkers(workers); + for (const evt of healEvents) lastEventFeed.push(evt); + done = render(workers); + + if (done) { + clearInterval(timer); + // Keep showing final state for 3 seconds then exit + setTimeout(() => { + process.stdout.write(SHOW_CURSOR); + process.exit(0); + }, 3000); + } + } catch (err) { + // Don't crash the monitor on transient read errors + process.stderr.write(`Monitor error: ${err.message}\n`); + } + }, INTERVAL_SEC * 1000); +} + +main(); From a91b8bec34d25795c01762ad92fecfe883ca53a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?T=C3=82CHES?= Date: Thu, 26 Mar 2026 23:33:22 -0600 Subject: [PATCH 04/27] feat: Headless Integration Hardening & Release (M002) (#2811) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Migrated headless orchestrator to use execution_complete events,… - "src/headless.ts" - "src/headless-ui.ts" - "src/tests/headless-v2-migration.test.ts" GSD-Task: S06/T02 * test: Wired pi-coding-agent to re-export JSONL utils from @gsd/rpc-clie… - "packages/pi-coding-agent/src/modes/rpc/jsonl.ts" - "packages/pi-coding-agent/package.json" - "packages/rpc-client/src/index.ts" - "packages/rpc-client/src/jsonl.ts" - "packages/rpc-client/src/rpc-client.ts" - "packages/rpc-client/src/rpc-types.ts" - "packages/rpc-client/src/rpc-client.test.ts" - "packages/rpc-client/package.json" GSD-Task: S06/T03 * feat: Wire --resume flag to resolve session IDs via prefix matching and… - "src/headless.ts" - "dist/headless.js" GSD-Task: S01/T01 * test: Added 5 e2e integration tests proving headless JSON batch, SIGINT… - "src/tests/integration/e2e-headless.test.ts" GSD-Task: S01/T02 * test: Updated @gsd/rpc-client and @gsd/mcp-server to 2.52.0 with publis… - "packages/rpc-client/package.json" - "packages/mcp-server/package.json" - "packages/rpc-client/.npmignore" - "packages/mcp-server/.npmignore" GSD-Task: S02/T01 * chore: auto-commit after complete-milestone GSD-Unit: M002-gzq23a * fix: revert jsonl.ts to inline implementation — @gsd-build/rpc-client not available at source-level test time in CI The re-export from @gsd-build/rpc-client fails in CI because tests run against TypeScript source (--experimental-strip-types) before any build step. The npm dependency resolves to node_modules/ which requires dist/ to exist. Reverting to the original inline implementation eliminates the cross-package dependency for source-level imports. --- package-lock.json | 28 +- packages/mcp-server/.npmignore | 1 + packages/mcp-server/README.md | 12 +- packages/mcp-server/package.json | 18 +- packages/mcp-server/src/cli.ts | 2 +- packages/mcp-server/src/index.ts | 2 +- packages/mcp-server/src/mcp-server.test.ts | 4 +- packages/mcp-server/src/session-manager.ts | 4 +- packages/mcp-server/src/types.ts | 2 +- packages/rpc-client/.npmignore | 1 + packages/rpc-client/README.md | 125 ++++ packages/rpc-client/examples/basic-usage.ts | 13 + packages/rpc-client/package.json | 20 +- packages/rpc-client/src/index.ts | 10 + packages/rpc-client/src/jsonl.ts | 64 ++ packages/rpc-client/src/rpc-client.test.ts | 568 +++++++++++++++++ packages/rpc-client/src/rpc-client.ts | 666 ++++++++++++++++++++ packages/rpc-client/src/rpc-types.ts | 399 ++++++++++++ packages/rpc-client/tsconfig.examples.json | 17 + packages/rpc-client/tsconfig.json | 24 + src/headless-ui.ts | 33 +- src/headless.ts | 218 ++++++- src/tests/headless-v2-migration.test.ts | 462 ++++++++++++++ src/tests/integration/e2e-headless.test.ts | 385 +++++++++++ 24 files changed, 2995 insertions(+), 83 deletions(-) create mode 100644 packages/mcp-server/.npmignore create mode 100644 packages/rpc-client/.npmignore create mode 100644 packages/rpc-client/README.md create mode 100644 packages/rpc-client/examples/basic-usage.ts create mode 100644 packages/rpc-client/src/index.ts create mode 100644 packages/rpc-client/src/jsonl.ts create mode 100644 packages/rpc-client/src/rpc-client.test.ts create mode 100644 packages/rpc-client/src/rpc-client.ts create mode 100644 packages/rpc-client/src/rpc-types.ts create mode 100644 packages/rpc-client/tsconfig.examples.json create mode 100644 packages/rpc-client/tsconfig.json create mode 100644 src/tests/headless-v2-migration.test.ts create mode 100644 src/tests/integration/e2e-headless.test.ts diff --git a/package-lock.json b/package-lock.json index 79eb7b36f..9a9a89a5b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "gsd-pi", - "version": "2.51.0", + "version": "2.52.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "gsd-pi", - "version": "2.51.0", + "version": "2.52.0", "hasInstallScript": true, "license": "MIT", "workspaces": [ @@ -1815,10 +1815,14 @@ "win32" ] }, - "node_modules/@gsd/mcp-server": { + "node_modules/@gsd-build/mcp-server": { "resolved": "packages/mcp-server", "link": true }, + "node_modules/@gsd-build/rpc-client": { + "resolved": "packages/rpc-client", + "link": true + }, "node_modules/@gsd/native": { "resolved": "packages/native", "link": true @@ -1839,10 +1843,6 @@ "resolved": "packages/pi-tui", "link": true }, - "node_modules/@gsd/rpc-client": { - "resolved": "packages/rpc-client", - "link": true - }, "node_modules/@gsd/studio": { "resolved": "studio", "link": true @@ -9150,10 +9150,11 @@ } }, "packages/mcp-server": { - "name": "@gsd/mcp-server", - "version": "2.51.0", + "name": "@gsd-build/mcp-server", + "version": "2.52.0", + "license": "MIT", "dependencies": { - "@gsd/rpc-client": "*", + "@gsd-build/rpc-client": "^2.52.0", "@modelcontextprotocol/sdk": "^1.27.1", "zod": "^4.0.0" }, @@ -9218,7 +9219,7 @@ }, "packages/pi-coding-agent": { "name": "@gsd/pi-coding-agent", - "version": "2.51.0", + "version": "2.52.0", "dependencies": { "@mariozechner/jiti": "^2.6.2", "@silvia-odwyer/photon-node": "^0.3.4", @@ -9261,8 +9262,9 @@ } }, "packages/rpc-client": { - "name": "@gsd/rpc-client", - "version": "2.51.0", + "name": "@gsd-build/rpc-client", + "version": "2.52.0", + "license": "MIT", "engines": { "node": ">=22.0.0" } diff --git a/packages/mcp-server/.npmignore b/packages/mcp-server/.npmignore new file mode 100644 index 000000000..5aedf8f6e --- /dev/null +++ b/packages/mcp-server/.npmignore @@ -0,0 +1 @@ +dist/*.test.* diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md index 821cf7002..fd4783ea9 100644 --- a/packages/mcp-server/README.md +++ b/packages/mcp-server/README.md @@ -1,4 +1,4 @@ -# @gsd/mcp-server +# @gsd-build/mcp-server MCP server exposing GSD orchestration tools for Claude Code, Cursor, and other MCP-compatible clients. @@ -7,7 +7,7 @@ Start GSD auto-mode sessions, poll progress, resolve blockers, and retrieve resu ## Installation ```bash -npm install @gsd/mcp-server +npm install @gsd-build/mcp-server ``` Or with the monorepo workspace: @@ -180,12 +180,12 @@ Resolve a pending blocker in a session by sending a response to the blocked UI r ``` ┌─────────────────┐ stdio ┌──────────────────┐ -│ MCP Client │ ◄────────────► │ @gsd/mcp-server │ +│ MCP Client │ ◄────────────► │ @gsd-build/mcp-server │ │ (Claude Code, │ JSON-RPC │ │ │ Cursor, etc.) │ │ SessionManager │ └─────────────────┘ │ │ │ │ ▼ │ - │ @gsd/rpc-client │ + │ @gsd-build/rpc-client │ │ │ │ │ ▼ │ │ GSD CLI (child │ @@ -193,9 +193,9 @@ Resolve a pending blocker in a session by sending a response to the blocked UI r └──────────────────┘ ``` -- **@gsd/mcp-server** — MCP protocol adapter. Translates MCP tool calls into SessionManager operations. +- **@gsd-build/mcp-server** — MCP protocol adapter. Translates MCP tool calls into SessionManager operations. - **SessionManager** — Manages RpcClient lifecycle. One session per project directory. Tracks events in a ring buffer (last 50), detects blockers, accumulates cost. -- **@gsd/rpc-client** — Low-level RPC client that spawns and communicates with the GSD CLI process via JSON-RPC over stdio. +- **@gsd-build/rpc-client** — Low-level RPC client that spawns and communicates with the GSD CLI process via JSON-RPC over stdio. ## License diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json index b55b9904d..449a074de 100644 --- a/packages/mcp-server/package.json +++ b/packages/mcp-server/package.json @@ -1,7 +1,16 @@ { - "name": "@gsd/mcp-server", - "version": "2.51.0", + "name": "@gsd-build/mcp-server", + "version": "2.52.0", "description": "MCP server exposing GSD orchestration tools for Claude Code, Cursor, and other MCP clients", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/gsd-build/gsd-2.git", + "directory": "packages/mcp-server" + }, + "publishConfig": { + "access": "public" + }, "type": "module", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -20,7 +29,7 @@ }, "dependencies": { "@modelcontextprotocol/sdk": "^1.27.1", - "@gsd/rpc-client": "*", + "@gsd-build/rpc-client": "^2.52.0", "zod": "^4.0.0" }, "devDependencies": { @@ -31,6 +40,7 @@ "node": ">=22.0.0" }, "files": [ - "dist" + "dist", + "!dist/**/*.test.*" ] } diff --git a/packages/mcp-server/src/cli.ts b/packages/mcp-server/src/cli.ts index b483ac2c2..eb4252d5a 100644 --- a/packages/mcp-server/src/cli.ts +++ b/packages/mcp-server/src/cli.ts @@ -1,7 +1,7 @@ #!/usr/bin/env node /** - * @gsd/mcp-server CLI — stdio transport entry point. + * @gsd-build/mcp-server CLI — stdio transport entry point. * * Connects the MCP server to stdin/stdout for use by Claude Code, * Cursor, and other MCP-compatible clients. diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts index f65ef29ac..7963926fc 100644 --- a/packages/mcp-server/src/index.ts +++ b/packages/mcp-server/src/index.ts @@ -1,5 +1,5 @@ /** - * @gsd/mcp-server — MCP server for GSD orchestration. + * @gsd-build/mcp-server — MCP server for GSD orchestration. */ export { SessionManager } from './session-manager.js'; diff --git a/packages/mcp-server/src/mcp-server.test.ts b/packages/mcp-server/src/mcp-server.test.ts index 7f71d4fb2..6d7ce156e 100644 --- a/packages/mcp-server/src/mcp-server.test.ts +++ b/packages/mcp-server/src/mcp-server.test.ts @@ -1,7 +1,7 @@ /** - * @gsd/mcp-server — Integration and unit tests. + * @gsd-build/mcp-server — Integration and unit tests. * - * Strategy: We cannot mock @gsd/rpc-client at the module level without + * Strategy: We cannot mock @gsd-build/rpc-client at the module level without * --experimental-test-module-mocks. Instead we test by: * * 1. Subclassing SessionManager to inject a mock client factory diff --git a/packages/mcp-server/src/session-manager.ts b/packages/mcp-server/src/session-manager.ts index 6c1ecf5db..841941196 100644 --- a/packages/mcp-server/src/session-manager.ts +++ b/packages/mcp-server/src/session-manager.ts @@ -8,8 +8,8 @@ import { execSync } from 'node:child_process'; import { resolve } from 'node:path'; -import { RpcClient } from '@gsd/rpc-client'; -import type { SdkAgentEvent, RpcInitResult, RpcCostUpdateEvent, RpcExtensionUIRequest } from '@gsd/rpc-client'; +import { RpcClient } from '@gsd-build/rpc-client'; +import type { SdkAgentEvent, RpcInitResult, RpcCostUpdateEvent, RpcExtensionUIRequest } from '@gsd-build/rpc-client'; import type { ManagedSession, ExecuteOptions, diff --git a/packages/mcp-server/src/types.ts b/packages/mcp-server/src/types.ts index 43cf3671e..fa12c9f61 100644 --- a/packages/mcp-server/src/types.ts +++ b/packages/mcp-server/src/types.ts @@ -2,7 +2,7 @@ * MCP Server types — session lifecycle and orchestration. */ -import type { RpcClient, SdkAgentEvent, RpcCostUpdateEvent, RpcExtensionUIRequest } from '@gsd/rpc-client'; +import type { RpcClient, SdkAgentEvent, RpcCostUpdateEvent, RpcExtensionUIRequest } from '@gsd-build/rpc-client'; // --------------------------------------------------------------------------- // Session Status diff --git a/packages/rpc-client/.npmignore b/packages/rpc-client/.npmignore new file mode 100644 index 000000000..5aedf8f6e --- /dev/null +++ b/packages/rpc-client/.npmignore @@ -0,0 +1 @@ +dist/*.test.* diff --git a/packages/rpc-client/README.md b/packages/rpc-client/README.md new file mode 100644 index 000000000..6dcad70e6 --- /dev/null +++ b/packages/rpc-client/README.md @@ -0,0 +1,125 @@ +# @gsd-build/rpc-client + +Standalone RPC client SDK for GSD. Spawn the agent process, perform a v2 protocol handshake, send commands, and consume typed events via an async generator — all in a few lines of TypeScript. + +Zero internal dependencies. Ships its own inlined types. + +## Installation + +```bash +npm install @gsd-build/rpc-client +``` + +## Quick Start + +```typescript +import { RpcClient } from '@gsd-build/rpc-client'; + +const client = new RpcClient({ cwd: process.cwd() }); +await client.start(); +const { sessionId } = await client.init({ clientId: 'my-app' }); +console.log(`Session: ${sessionId}`); + +await client.prompt('Create a hello world script'); +for await (const event of client.events()) { + if (event.type === 'execution_complete') break; + console.log(event.type); +} +await client.shutdown(); +``` + +## API + +### Constructor + +```typescript +const client = new RpcClient(options?: RpcClientOptions); +``` + +| Option | Type | Description | +|------------|--------------------------|------------------------------------------| +| `cliPath` | `string` | Path to the CLI entry point | +| `cwd` | `string` | Working directory for the agent | +| `env` | `Record` | Environment variables | +| `provider` | `string` | AI provider (e.g. `"anthropic"`) | +| `model` | `string` | Model ID (e.g. `"claude-sonnet"`) | +| `args` | `string[]` | Additional CLI arguments | + +### Lifecycle + +| Method | Description | +|---------------|------------------------------------------------| +| `start()` | Spawn the agent process | +| `init(opts?)` | v2 handshake — returns `sessionId`, capabilities | +| `shutdown()` | Graceful shutdown | +| `stop()` | Force-kill the process | + +### Commands + +| Method | Description | +|--------------------------------|----------------------------------------| +| `prompt(message, images?)` | Send a prompt | +| `steer(message, images?)` | Interrupt with a steering message | +| `followUp(message, images?)` | Queue a follow-up message | +| `abort()` | Abort current operation | +| `subscribe(events)` | Subscribe to event types (`["*"]` for all) | + +### Events + +```typescript +// Async generator — recommended +for await (const event of client.events()) { + console.log(event.type); +} + +// Callback-based +const unsubscribe = client.onEvent((event) => { + console.log(event.type); +}); +``` + +### Helpers + +| Method | Description | +|---------------------------------------|------------------------------------------| +| `waitForIdle(timeout?)` | Wait for `agent_end` event | +| `collectEvents(timeout?)` | Collect events until idle | +| `promptAndWait(message, images?, t?)` | Send prompt and collect events | + +### Session & Model + +| Method | Description | +|----------------------------------|-----------------------------------| +| `getState()` | Get session state | +| `setModel(provider, modelId)` | Set model | +| `cycleModel()` | Cycle to next model | +| `getAvailableModels()` | List available models | +| `setThinkingLevel(level)` | Set thinking level | +| `cycleThinkingLevel()` | Cycle thinking level | +| `compact(instructions?)` | Compact session context | +| `getSessionStats()` | Get session statistics | +| `bash(command)` | Execute a bash command | +| `newSession(parent?)` | Start a new session | +| `sendUIResponse(id, response)` | Respond to extension UI requests | + +## Type Exports + +All protocol types are exported from the package root: + +```typescript +import type { + RpcCommand, + RpcResponse, + RpcInitResult, + RpcExecutionCompleteEvent, + RpcCostUpdateEvent, + RpcV2Event, + SessionStats, + SdkAgentEvent, + RpcClientOptions, +} from '@gsd-build/rpc-client'; +``` + +## License + +MIT diff --git a/packages/rpc-client/examples/basic-usage.ts b/packages/rpc-client/examples/basic-usage.ts new file mode 100644 index 000000000..3248799b4 --- /dev/null +++ b/packages/rpc-client/examples/basic-usage.ts @@ -0,0 +1,13 @@ +import { RpcClient } from '@gsd-build/rpc-client'; + +const client = new RpcClient({ cwd: process.cwd() }); +await client.start(); +const { sessionId } = await client.init({ clientId: 'my-app' }); +console.log(`Session: ${sessionId}`); + +await client.prompt('Create a hello world script'); +for await (const event of client.events()) { + if (event.type === 'execution_complete') break; + console.log(event.type); +} +await client.shutdown(); diff --git a/packages/rpc-client/package.json b/packages/rpc-client/package.json index 50461c856..934be48ab 100644 --- a/packages/rpc-client/package.json +++ b/packages/rpc-client/package.json @@ -1,7 +1,16 @@ { - "name": "@gsd/rpc-client", - "version": "2.51.0", + "name": "@gsd-build/rpc-client", + "version": "2.52.0", "description": "Standalone RPC client SDK for GSD — zero internal dependencies", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/gsd-build/gsd-2.git", + "directory": "packages/rpc-client" + }, + "publishConfig": { + "access": "public" + }, "type": "module", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -12,8 +21,13 @@ } }, "files": [ - "dist" + "dist", + "!dist/**/*.test.*" ], + "scripts": { + "build": "tsc -p tsconfig.json", + "test": "node --test dist/rpc-client.test.js" + }, "engines": { "node": ">=22.0.0" } diff --git a/packages/rpc-client/src/index.ts b/packages/rpc-client/src/index.ts new file mode 100644 index 000000000..3771a3359 --- /dev/null +++ b/packages/rpc-client/src/index.ts @@ -0,0 +1,10 @@ +/** + * @gsd-build/rpc-client — standalone RPC client SDK for GSD. + * + * Re-exports all types, JSONL utilities, and the RpcClient class. + */ + +export * from "./rpc-types.js"; +export { serializeJsonLine, attachJsonlLineReader } from "./jsonl.js"; +export { RpcClient } from "./rpc-client.js"; +export type { RpcClientOptions, RpcEventListener, SdkAgentEvent } from "./rpc-client.js"; diff --git a/packages/rpc-client/src/jsonl.ts b/packages/rpc-client/src/jsonl.ts new file mode 100644 index 000000000..5392defef --- /dev/null +++ b/packages/rpc-client/src/jsonl.ts @@ -0,0 +1,64 @@ +import type { Readable } from "node:stream"; +import { StringDecoder } from "node:string_decoder"; + +/** + * Serialize a single strict JSONL record. + * + * Framing is LF-only. Payload strings may contain other Unicode separators such as + * U+2028 and U+2029. Clients must split records on `\n` only. + */ +export function serializeJsonLine(value: unknown): string { + return `${JSON.stringify(value)}\n`; +} + +/** + * Attach an LF-only JSONL reader to a stream. + * + * This intentionally does not use Node readline. Readline splits on additional + * Unicode separators that are valid inside JSON strings and therefore does not + * implement strict JSONL framing. + */ +export function attachJsonlLineReader(stream: Readable, onLine: (line: string) => void): () => void { + const decoder = new StringDecoder("utf8"); + let buffer = ""; + + const emitLine = (line: string) => { + onLine(line.endsWith("\r") ? line.slice(0, -1) : line); + }; + + const onData = (chunk: string | Buffer) => { + buffer += typeof chunk === "string" ? chunk : decoder.write(chunk); + + while (true) { + const newlineIndex = buffer.indexOf("\n"); + if (newlineIndex === -1) { + return; + } + + emitLine(buffer.slice(0, newlineIndex)); + buffer = buffer.slice(newlineIndex + 1); + } + }; + + const onEnd = () => { + buffer += decoder.end(); + if (buffer.length > 0) { + emitLine(buffer); + buffer = ""; + } + }; + + const onError = (_err: Error) => { + // Stream errors are non-fatal for JSONL reading + }; + + stream.on("data", onData); + stream.on("end", onEnd); + stream.on("error", onError); + + return () => { + stream.off("data", onData); + stream.off("end", onEnd); + stream.off("error", onError); + }; +} diff --git a/packages/rpc-client/src/rpc-client.test.ts b/packages/rpc-client/src/rpc-client.test.ts new file mode 100644 index 000000000..9fcb7874f --- /dev/null +++ b/packages/rpc-client/src/rpc-client.test.ts @@ -0,0 +1,568 @@ +import { describe, it, beforeEach, afterEach } from "node:test"; +import assert from "node:assert/strict"; +import { PassThrough } from "node:stream"; +import { serializeJsonLine, attachJsonlLineReader } from "./jsonl.js"; +import type { + RpcInitResult, + RpcExecutionCompleteEvent, + RpcCostUpdateEvent, + RpcProtocolVersion, + SessionStats, + RpcV2Event, +} from "./rpc-types.js"; +import { RpcClient } from "./rpc-client.js"; +import type { SdkAgentEvent } from "./rpc-client.js"; + +// ============================================================================ +// JSONL Tests +// ============================================================================ + +describe("serializeJsonLine", () => { + it("produces valid JSON terminated with LF", () => { + const result = serializeJsonLine({ type: "test", value: 42 }); + assert.ok(result.endsWith("\n"), "must end with LF"); + const parsed = JSON.parse(result.trim()); + assert.equal(parsed.type, "test"); + assert.equal(parsed.value, 42); + }); + + it("serializes strings with special characters", () => { + const result = serializeJsonLine({ msg: "hello\nworld" }); + assert.ok(result.endsWith("\n")); + // The embedded \n must be escaped inside the JSON — only the trailing LF is the framing delimiter + const lines = result.split("\n"); + // Should be exactly 2 parts: the JSON line and the empty string after trailing LF + assert.equal(lines.length, 2); + assert.equal(lines[1], ""); + const parsed = JSON.parse(lines[0]); + assert.equal(parsed.msg, "hello\nworld"); + }); + + it("handles empty objects", () => { + const result = serializeJsonLine({}); + assert.equal(result, "{}\n"); + }); +}); + +describe("attachJsonlLineReader", () => { + it("splits on LF correctly", async () => { + const stream = new PassThrough(); + const lines: string[] = []; + + attachJsonlLineReader(stream, (line) => lines.push(line)); + + stream.write('{"a":1}\n{"b":2}\n'); + stream.end(); + + // Let microtask queue flush + await new Promise((r) => setTimeout(r, 10)); + + assert.equal(lines.length, 2); + assert.equal(JSON.parse(lines[0]).a, 1); + assert.equal(JSON.parse(lines[1]).b, 2); + }); + + it("handles chunked data across boundaries", async () => { + const stream = new PassThrough(); + const lines: string[] = []; + + attachJsonlLineReader(stream, (line) => lines.push(line)); + + // Write in fragments that split mid-line + stream.write('{"type":"hel'); + stream.write('lo"}\n{"type":"w'); + stream.write('orld"}\n'); + stream.end(); + + await new Promise((r) => setTimeout(r, 10)); + + assert.equal(lines.length, 2); + assert.equal(JSON.parse(lines[0]).type, "hello"); + assert.equal(JSON.parse(lines[1]).type, "world"); + }); + + it("emits trailing data on stream end", async () => { + const stream = new PassThrough(); + const lines: string[] = []; + + attachJsonlLineReader(stream, (line) => lines.push(line)); + + stream.write('{"final":true}'); + stream.end(); + + await new Promise((r) => setTimeout(r, 10)); + + assert.equal(lines.length, 1); + assert.equal(JSON.parse(lines[0]).final, true); + }); + + it("returns a detach function that stops reading", async () => { + const stream = new PassThrough(); + const lines: string[] = []; + + const detach = attachJsonlLineReader(stream, (line) => lines.push(line)); + + stream.write('{"a":1}\n'); + await new Promise((r) => setTimeout(r, 10)); + assert.equal(lines.length, 1); + + detach(); + + stream.write('{"b":2}\n'); + stream.end(); + await new Promise((r) => setTimeout(r, 10)); + + // Should still be 1 — detach removed listeners + assert.equal(lines.length, 1); + }); + + it("strips CR from CRLF line endings", async () => { + const stream = new PassThrough(); + const lines: string[] = []; + + attachJsonlLineReader(stream, (line) => lines.push(line)); + + stream.write('{"v":1}\r\n'); + stream.end(); + + await new Promise((r) => setTimeout(r, 10)); + + assert.equal(lines.length, 1); + assert.equal(JSON.parse(lines[0]).v, 1); + }); +}); + +// ============================================================================ +// Type Shape Tests +// ============================================================================ + +describe("type shapes", () => { + it("RpcInitResult has protocolVersion, sessionId, capabilities", () => { + const init: RpcInitResult = { + protocolVersion: 2, + sessionId: "sess_123", + capabilities: { + events: ["execution_complete", "cost_update"], + commands: ["prompt", "steer"], + }, + }; + assert.equal(init.protocolVersion, 2); + assert.equal(init.sessionId, "sess_123"); + assert.ok(Array.isArray(init.capabilities.events)); + assert.ok(Array.isArray(init.capabilities.commands)); + }); + + it("RpcExecutionCompleteEvent has required fields", () => { + const event: RpcExecutionCompleteEvent = { + type: "execution_complete", + runId: "run_abc", + status: "completed", + stats: { + sessionFile: "/tmp/session.json", + sessionId: "sess_123", + userMessages: 5, + assistantMessages: 5, + toolCalls: 3, + toolResults: 3, + totalMessages: 10, + tokens: { input: 1000, output: 500, cacheRead: 200, cacheWrite: 100, total: 1800 }, + cost: 0.05, + }, + }; + assert.equal(event.type, "execution_complete"); + assert.equal(event.runId, "run_abc"); + assert.equal(event.status, "completed"); + assert.ok(event.stats); + assert.equal(event.stats.sessionId, "sess_123"); + }); + + it("RpcCostUpdateEvent has required fields", () => { + const event: RpcCostUpdateEvent = { + type: "cost_update", + runId: "run_abc", + turnCost: 0.01, + cumulativeCost: 0.05, + tokens: { input: 500, output: 200, cacheRead: 100, cacheWrite: 50 }, + }; + assert.equal(event.type, "cost_update"); + assert.equal(event.runId, "run_abc"); + assert.equal(event.turnCost, 0.01); + assert.equal(event.cumulativeCost, 0.05); + assert.ok(event.tokens); + }); + + it("SessionStats has all expected fields", () => { + const stats: SessionStats = { + sessionFile: "/tmp/session.json", + sessionId: "s1", + userMessages: 10, + assistantMessages: 10, + toolCalls: 5, + toolResults: 5, + totalMessages: 20, + tokens: { input: 2000, output: 1000, cacheRead: 500, cacheWrite: 200, total: 3700 }, + cost: 0.10, + }; + assert.equal(stats.sessionId, "s1"); + assert.equal(stats.userMessages, 10); + assert.equal(stats.tokens.total, 3700); + assert.equal(stats.cost, 0.10); + }); + + it("RpcProtocolVersion accepts 1 and 2", () => { + const v1: RpcProtocolVersion = 1; + const v2: RpcProtocolVersion = 2; + assert.equal(v1, 1); + assert.equal(v2, 2); + }); + + it("RpcV2Event discriminated union covers both event types", () => { + const events: RpcV2Event[] = [ + { + type: "execution_complete", + runId: "r1", + status: "completed", + stats: { + sessionFile: undefined, + sessionId: "s1", + userMessages: 1, + assistantMessages: 1, + toolCalls: 0, + toolResults: 0, + totalMessages: 2, + tokens: { input: 100, output: 50, cacheRead: 0, cacheWrite: 0, total: 150 }, + cost: 0.001, + }, + }, + { + type: "cost_update", + runId: "r1", + turnCost: 0.001, + cumulativeCost: 0.001, + tokens: { input: 100, output: 50, cacheRead: 0, cacheWrite: 0 }, + }, + ]; + assert.equal(events.length, 2); + assert.equal(events[0].type, "execution_complete"); + assert.equal(events[1].type, "cost_update"); + }); +}); + +// ============================================================================ +// RpcClient Construction Tests +// ============================================================================ + +describe("RpcClient construction", () => { + it("creates with default options", () => { + const client = new RpcClient(); + assert.ok(client); + }); + + it("creates with custom options", () => { + const client = new RpcClient({ + cliPath: "/usr/local/bin/gsd", + cwd: "/tmp", + env: { NODE_ENV: "test" }, + provider: "anthropic", + model: "claude-sonnet", + args: ["--verbose"], + }); + assert.ok(client); + }); +}); + +// ============================================================================ +// events() Generator Tests +// ============================================================================ + +describe("events() async generator", () => { + it("yields events from a mock stream in order", async () => { + const client = new RpcClient(); + + // Reach into the client to set up a mock process with a PassThrough stdout + const mockStdout = new PassThrough(); + const mockStderr = new PassThrough(); + const mockStdin = new PassThrough(); + + // Simulate a started process by setting internal state + // We use Object.assign to set private fields for testing + const clientAny = client as any; + clientAny.process = { + stdout: mockStdout, + stderr: mockStderr, + stdin: mockStdin, + exitCode: null, + kill: () => {}, + on: (event: string, handler: (...args: any[]) => void) => { + if (event === "exit") { + // Store exit handler so we can trigger it + clientAny._testExitHandler = handler; + } + }, + removeListener: () => {}, + }; + + // Attach the JSONL reader like start() does + clientAny.stopReadingStdout = attachJsonlLineReader(mockStdout, (line: string) => { + clientAny.handleLine(line); + }); + + // Collect events from the generator + const received: SdkAgentEvent[] = []; + const genPromise = (async () => { + for await (const event of client.events()) { + received.push(event); + if (event.type === "done") break; + } + })(); + + // Simulate server sending events + await new Promise((r) => setTimeout(r, 20)); + mockStdout.write(serializeJsonLine({ type: "agent_start", runId: "r1" })); + await new Promise((r) => setTimeout(r, 20)); + mockStdout.write(serializeJsonLine({ type: "token", text: "hello" })); + await new Promise((r) => setTimeout(r, 20)); + mockStdout.write(serializeJsonLine({ type: "done" })); + + await genPromise; + + assert.equal(received.length, 3); + assert.equal(received[0].type, "agent_start"); + assert.equal(received[1].type, "token"); + assert.equal(received[2].type, "done"); + }); + + it("terminates when process exits", async () => { + const client = new RpcClient(); + const mockStdout = new PassThrough(); + const mockStderr = new PassThrough(); + const mockStdin = new PassThrough(); + + const exitHandlers: Array<() => void> = []; + const clientAny = client as any; + clientAny.process = { + stdout: mockStdout, + stderr: mockStderr, + stdin: mockStdin, + exitCode: null, + kill: () => {}, + on: (event: string, handler: () => void) => { + if (event === "exit") exitHandlers.push(handler); + }, + removeListener: (event: string, handler: () => void) => { + const idx = exitHandlers.indexOf(handler); + if (idx !== -1) exitHandlers.splice(idx, 1); + }, + }; + + clientAny.stopReadingStdout = attachJsonlLineReader(mockStdout, (line: string) => { + clientAny.handleLine(line); + }); + + const received: SdkAgentEvent[] = []; + const genPromise = (async () => { + for await (const event of client.events()) { + received.push(event); + } + })(); + + // Send one event, then simulate process exit + await new Promise((r) => setTimeout(r, 20)); + mockStdout.write(serializeJsonLine({ type: "agent_start" })); + await new Promise((r) => setTimeout(r, 20)); + + // Fire exit handlers + for (const h of exitHandlers) h(); + + await genPromise; + + assert.equal(received.length, 1); + assert.equal(received[0].type, "agent_start"); + }); + + it("throws if client not started", async () => { + const client = new RpcClient(); + await assert.rejects(async () => { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for await (const _event of client.events()) { + // should not reach + } + }, /Client not started/); + }); +}); + +// ============================================================================ +// sendUIResponse Serialization Test +// ============================================================================ + +describe("sendUIResponse serialization", () => { + it("writes correct JSONL to stdin", () => { + const client = new RpcClient(); + const chunks: string[] = []; + const mockStdin = { + write: (data: string) => { + chunks.push(data); + return true; + }, + }; + + const clientAny = client as any; + clientAny.process = { stdin: mockStdin }; + + client.sendUIResponse("ui_1", { value: "hello" }); + + assert.equal(chunks.length, 1); + const parsed = JSON.parse(chunks[0].trim()); + assert.equal(parsed.type, "extension_ui_response"); + assert.equal(parsed.id, "ui_1"); + assert.equal(parsed.value, "hello"); + }); + + it("serializes confirmed response", () => { + const client = new RpcClient(); + const chunks: string[] = []; + const mockStdin = { + write: (data: string) => { + chunks.push(data); + return true; + }, + }; + const clientAny = client as any; + clientAny.process = { stdin: mockStdin }; + + client.sendUIResponse("ui_2", { confirmed: true }); + + const parsed = JSON.parse(chunks[0].trim()); + assert.equal(parsed.confirmed, true); + assert.equal(parsed.id, "ui_2"); + }); + + it("serializes cancelled response", () => { + const client = new RpcClient(); + const chunks: string[] = []; + const mockStdin = { + write: (data: string) => { + chunks.push(data); + return true; + }, + }; + const clientAny = client as any; + clientAny.process = { stdin: mockStdin }; + + client.sendUIResponse("ui_3", { cancelled: true }); + + const parsed = JSON.parse(chunks[0].trim()); + assert.equal(parsed.cancelled, true); + }); +}); + +// ============================================================================ +// init/shutdown/subscribe Serialization Tests +// ============================================================================ + +describe("v2 command serialization", () => { + // Helper: capture what the client sends to stdin + function createMockClient(): { client: RpcClient; sent: any[]; respondNext: (data?: any) => void } { + const client = new RpcClient(); + const sent: any[] = []; + let respondFn: ((data: any) => void) | null = null; + + const clientAny = client as any; + clientAny.process = { + stdin: { + write: (data: string) => { + const parsed = JSON.parse(data.trim()); + sent.push(parsed); + // Auto-respond with success after a tick + if (respondFn) { + setTimeout(() => respondFn!(parsed), 5); + } + return true; + }, + }, + stderr: new PassThrough(), + exitCode: null, + kill: () => {}, + on: () => {}, + removeListener: () => {}, + }; + + const respondNext = (overrides: any = {}) => { + respondFn = (parsed) => { + const response = { + type: "response", + id: parsed.id, + command: parsed.type, + success: true, + data: {}, + ...overrides, + }; + clientAny.handleLine(JSON.stringify(response)); + }; + }; + + return { client, sent, respondNext }; + } + + it("init sends correct v2 init command", async () => { + const { client, sent, respondNext } = createMockClient(); + respondNext({ data: { protocolVersion: 2, sessionId: "s1", capabilities: { events: [], commands: [] } } }); + + const result = await client.init({ clientId: "test-app" }); + + assert.equal(sent.length, 1); + assert.equal(sent[0].type, "init"); + assert.equal(sent[0].protocolVersion, 2); + assert.equal(sent[0].clientId, "test-app"); + assert.equal(result.protocolVersion, 2); + assert.equal(result.sessionId, "s1"); + }); + + it("shutdown sends shutdown command", async () => { + const { client, sent, respondNext } = createMockClient(); + + // Override the process exit wait + const clientAny = client as any; + const originalProcess = clientAny.process; + const exitHandlers: Array<(code: number) => void> = []; + clientAny.process = { + ...originalProcess, + on: (event: string, handler: (code: number) => void) => { + if (event === "exit") exitHandlers.push(handler); + }, + }; + + respondNext(); + + // Call shutdown and simulate process exit + const shutdownPromise = client.shutdown(); + await new Promise((r) => setTimeout(r, 20)); + for (const h of exitHandlers) h(0); + + await shutdownPromise; + + assert.equal(sent.length, 1); + assert.equal(sent[0].type, "shutdown"); + }); + + it("subscribe sends subscribe command with event list", async () => { + const { client, sent, respondNext } = createMockClient(); + respondNext(); + + await client.subscribe(["execution_complete", "cost_update"]); + + assert.equal(sent.length, 1); + assert.equal(sent[0].type, "subscribe"); + assert.deepEqual(sent[0].events, ["execution_complete", "cost_update"]); + }); + + it("subscribe with wildcard", async () => { + const { client, sent, respondNext } = createMockClient(); + respondNext(); + + await client.subscribe(["*"]); + + assert.equal(sent[0].events.length, 1); + assert.equal(sent[0].events[0], "*"); + }); +}); diff --git a/packages/rpc-client/src/rpc-client.ts b/packages/rpc-client/src/rpc-client.ts new file mode 100644 index 000000000..4d5edc53c --- /dev/null +++ b/packages/rpc-client/src/rpc-client.ts @@ -0,0 +1,666 @@ +/** + * RPC Client for programmatic access to the coding agent. + * + * Spawns the agent in RPC mode and provides a typed API for all operations. + * This is a standalone SDK client — all types are inlined with zero internal + * package dependencies. + */ + +import { type ChildProcess, spawn } from "node:child_process"; +import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js"; +import type { + BashResult, + CompactionResult, + ImageContent, + ModelInfo, + RpcCommand, + RpcInitResult, + RpcResponse, + RpcSessionState, + RpcSlashCommand, + ThinkingLevel, + SessionStats, +} from "./rpc-types.js"; + +// ============================================================================ +// Types +// ============================================================================ + +/** Distributive Omit that works with union types */ +type DistributiveOmit = T extends unknown ? Omit : never; + +/** RpcCommand without the id field (for internal send) */ +type RpcCommandBody = DistributiveOmit; + +/** Agent event — a loosely-typed record from the server. The `type` field is always present. */ +export interface SdkAgentEvent { + type: string; + [key: string]: unknown; +} + +export interface RpcClientOptions { + /** Path to the CLI entry point (default: searches for dist/cli.js) */ + cliPath?: string; + /** Working directory for the agent */ + cwd?: string; + /** Environment variables */ + env?: Record; + /** Provider to use */ + provider?: string; + /** Model ID to use */ + model?: string; + /** Additional CLI arguments */ + args?: string[]; +} + +export type RpcEventListener = (event: SdkAgentEvent) => void; + +// ============================================================================ +// RPC Client +// ============================================================================ + +export class RpcClient { + private process: ChildProcess | null = null; + private stopReadingStdout: (() => void) | null = null; + private _stderrHandler?: (data: Buffer) => void; + private eventListeners: RpcEventListener[] = []; + private pendingRequests: Map void; reject: (error: Error) => void }> = + new Map(); + private requestId = 0; + private stderr = ""; + private _stopped = false; + + constructor(private options: RpcClientOptions = {}) {} + + /** + * Start the RPC agent process. + */ + async start(): Promise { + if (this.process) { + throw new Error("Client already started"); + } + + this._stopped = false; + + const cliPath = this.options.cliPath ?? "dist/cli.js"; + const args = ["--mode", "rpc"]; + + if (this.options.provider) { + args.push("--provider", this.options.provider); + } + if (this.options.model) { + args.push("--model", this.options.model); + } + if (this.options.args) { + args.push(...this.options.args); + } + + this.process = spawn("node", [cliPath, ...args], { + cwd: this.options.cwd, + env: { ...process.env, ...this.options.env }, + stdio: ["pipe", "pipe", "pipe"], + }); + + // Collect stderr for debugging + this._stderrHandler = (data: Buffer) => { + this.stderr += data.toString(); + }; + this.process.stderr?.on("data", this._stderrHandler); + + // Set up strict JSONL reader for stdout. + this.stopReadingStdout = attachJsonlLineReader(this.process.stdout!, (line) => { + this.handleLine(line); + }); + + // Detect unexpected subprocess exit and reject all pending requests + this.process.on("exit", (code, signal) => { + if (this.pendingRequests.size > 0) { + const reason = signal ? `signal ${signal}` : `code ${code}`; + const error = new Error(`Agent process exited unexpectedly (${reason}). Stderr: ${this.stderr}`); + for (const [id, pending] of this.pendingRequests) { + this.pendingRequests.delete(id); + pending.reject(error); + } + } + }); + + // Wait a moment for process to initialize + await new Promise((resolve) => setTimeout(resolve, 100)); + + if (this.process.exitCode !== null) { + throw new Error(`Agent process exited immediately with code ${this.process.exitCode}. Stderr: ${this.stderr}`); + } + } + + /** + * Stop the RPC agent process. + */ + async stop(): Promise { + if (!this.process) return; + + this._stopped = true; + + this.stopReadingStdout?.(); + this.stopReadingStdout = null; + if (this._stderrHandler) { + this.process.stderr?.removeListener("data", this._stderrHandler); + this._stderrHandler = undefined; + } + this.process.kill("SIGTERM"); + + // Wait for process to exit + await new Promise((resolve) => { + const timeout = setTimeout(() => { + this.process?.kill("SIGKILL"); + resolve(); + }, 1000); + + this.process?.on("exit", () => { + clearTimeout(timeout); + resolve(); + }); + }); + + this.process = null; + this.pendingRequests.clear(); + } + + /** + * Subscribe to agent events via callback. + */ + onEvent(listener: RpcEventListener): () => void { + this.eventListeners.push(listener); + return () => { + const index = this.eventListeners.indexOf(listener); + if (index !== -1) { + this.eventListeners.splice(index, 1); + } + }; + } + + /** + * Async generator that yields agent events as they arrive. + * + * Usage: + * ```ts + * for await (const event of client.events()) { + * console.log(event.type, event); + * } + * ``` + * + * The generator terminates when: + * - `stop()` is called + * - The agent process exits + * - The consumer breaks out of the loop + */ + async *events(): AsyncGenerator { + if (!this.process) { + throw new Error("Client not started — call start() before events()"); + } + + if (this._stopped) { + return; + } + + const buffer: SdkAgentEvent[] = []; + let resolve: ((value: void) => void) | null = null; + let done = false; + + // When a new event arrives, either push to buffer or wake up the awaiting generator + const listener = (event: SdkAgentEvent) => { + buffer.push(event); + if (resolve) { + const r = resolve; + resolve = null; + r(); + } + }; + + // When the process exits, signal the generator to stop + const onExit = () => { + done = true; + if (resolve) { + const r = resolve; + resolve = null; + r(); + } + }; + + const unsubscribe = this.onEvent(listener); + this.process.on("exit", onExit); + + try { + while (!done && !this._stopped) { + // Drain buffer first + while (buffer.length > 0) { + yield buffer.shift()!; + } + + // If done after draining, break + if (done || this._stopped) { + break; + } + + // Wait for next event or process exit + await new Promise((r) => { + resolve = r; + }); + } + + // Drain any remaining events that arrived with the exit signal + while (buffer.length > 0) { + yield buffer.shift()!; + } + } finally { + unsubscribe(); + this.process?.removeListener("exit", onExit); + } + } + + /** + * Get collected stderr output (useful for debugging). + */ + getStderr(): string { + return this.stderr; + } + + // ========================================================================= + // Command Methods + // ========================================================================= + + /** + * Send a prompt to the agent. + * Returns immediately after sending; use onEvent() or events() to receive streaming events. + * Use waitForIdle() to wait for completion. + */ + async prompt(message: string, images?: ImageContent[]): Promise { + await this.send({ type: "prompt", message, images }); + } + + /** + * Queue a steering message to interrupt the agent mid-run. + */ + async steer(message: string, images?: ImageContent[]): Promise { + await this.send({ type: "steer", message, images }); + } + + /** + * Queue a follow-up message to be processed after the agent finishes. + */ + async followUp(message: string, images?: ImageContent[]): Promise { + await this.send({ type: "follow_up", message, images }); + } + + /** + * Abort current operation. + */ + async abort(): Promise { + await this.send({ type: "abort" }); + } + + /** + * Start a new session, optionally with parent tracking. + * @param parentSession - Optional parent session path for lineage tracking + * @returns Object with `cancelled: true` if an extension cancelled the new session + */ + async newSession(parentSession?: string): Promise<{ cancelled: boolean }> { + const response = await this.send({ type: "new_session", parentSession }); + return this.getData(response); + } + + /** + * Get current session state. + */ + async getState(): Promise { + const response = await this.send({ type: "get_state" }); + return this.getData(response); + } + + /** + * Set model by provider and ID. + */ + async setModel(provider: string, modelId: string): Promise<{ provider: string; id: string }> { + const response = await this.send({ type: "set_model", provider, modelId }); + return this.getData(response); + } + + /** + * Cycle to next model. + */ + async cycleModel(): Promise<{ + model: { provider: string; id: string }; + thinkingLevel: ThinkingLevel; + isScoped: boolean; + } | null> { + const response = await this.send({ type: "cycle_model" }); + return this.getData(response); + } + + /** + * Get list of available models. + */ + async getAvailableModels(): Promise { + const response = await this.send({ type: "get_available_models" }); + return this.getData<{ models: ModelInfo[] }>(response).models; + } + + /** + * Set thinking level. + */ + async setThinkingLevel(level: ThinkingLevel): Promise { + await this.send({ type: "set_thinking_level", level }); + } + + /** + * Cycle thinking level. + */ + async cycleThinkingLevel(): Promise<{ level: ThinkingLevel } | null> { + const response = await this.send({ type: "cycle_thinking_level" }); + return this.getData(response); + } + + /** + * Set steering mode. + */ + async setSteeringMode(mode: "all" | "one-at-a-time"): Promise { + await this.send({ type: "set_steering_mode", mode }); + } + + /** + * Set follow-up mode. + */ + async setFollowUpMode(mode: "all" | "one-at-a-time"): Promise { + await this.send({ type: "set_follow_up_mode", mode }); + } + + /** + * Compact session context. + */ + async compact(customInstructions?: string): Promise { + const response = await this.send({ type: "compact", customInstructions }); + return this.getData(response); + } + + /** + * Set auto-compaction enabled/disabled. + */ + async setAutoCompaction(enabled: boolean): Promise { + await this.send({ type: "set_auto_compaction", enabled }); + } + + /** + * Set auto-retry enabled/disabled. + */ + async setAutoRetry(enabled: boolean): Promise { + await this.send({ type: "set_auto_retry", enabled }); + } + + /** + * Abort in-progress retry. + */ + async abortRetry(): Promise { + await this.send({ type: "abort_retry" }); + } + + /** + * Execute a bash command. + */ + async bash(command: string): Promise { + const response = await this.send({ type: "bash", command }); + return this.getData(response); + } + + /** + * Abort running bash command. + */ + async abortBash(): Promise { + await this.send({ type: "abort_bash" }); + } + + /** + * Get session statistics. + */ + async getSessionStats(): Promise { + const response = await this.send({ type: "get_session_stats" }); + return this.getData(response); + } + + /** + * Export session to HTML. + */ + async exportHtml(outputPath?: string): Promise<{ path: string }> { + const response = await this.send({ type: "export_html", outputPath }); + return this.getData(response); + } + + /** + * Switch to a different session file. + * @returns Object with `cancelled: true` if an extension cancelled the switch + */ + async switchSession(sessionPath: string): Promise<{ cancelled: boolean }> { + const response = await this.send({ type: "switch_session", sessionPath }); + return this.getData(response); + } + + /** + * Fork from a specific message. + * @returns Object with `text` (the message text) and `cancelled` (if extension cancelled) + */ + async fork(entryId: string): Promise<{ text: string; cancelled: boolean }> { + const response = await this.send({ type: "fork", entryId }); + return this.getData(response); + } + + /** + * Get messages available for forking. + */ + async getForkMessages(): Promise> { + const response = await this.send({ type: "get_fork_messages" }); + return this.getData<{ messages: Array<{ entryId: string; text: string }> }>(response).messages; + } + + /** + * Get text of last assistant message. + */ + async getLastAssistantText(): Promise { + const response = await this.send({ type: "get_last_assistant_text" }); + return this.getData<{ text: string | null }>(response).text; + } + + /** + * Set the session display name. + */ + async setSessionName(name: string): Promise { + await this.send({ type: "set_session_name", name }); + } + + /** + * Get all messages in the session. + * Messages are returned as opaque objects — the internal structure may vary. + */ + async getMessages(): Promise { + const response = await this.send({ type: "get_messages" }); + return this.getData<{ messages: unknown[] }>(response).messages; + } + + /** + * Get available commands (extension commands, prompt templates, skills). + */ + async getCommands(): Promise { + const response = await this.send({ type: "get_commands" }); + return this.getData<{ commands: RpcSlashCommand[] }>(response).commands; + } + + /** + * Send a UI response to a pending extension_ui_request. + * Fire-and-forget — no request/response correlation. + */ + sendUIResponse(id: string, response: { value?: string; values?: string[]; confirmed?: boolean; cancelled?: boolean }): void { + if (!this.process?.stdin) { + throw new Error("Client not started"); + } + this.process.stdin.write(serializeJsonLine({ + type: "extension_ui_response", + id, + ...response, + })); + } + + /** + * Initialize a v2 protocol session. Must be sent as the first command. + * Returns the negotiated protocol version, session ID, and server capabilities. + */ + async init(options?: { clientId?: string }): Promise { + const response = await this.send({ type: "init", protocolVersion: 2, clientId: options?.clientId }); + return this.getData(response); + } + + /** + * Request a graceful shutdown of the agent process. + * Waits for the response before the process exits. + */ + async shutdown(): Promise { + await this.send({ type: "shutdown" }); + // Wait for process to exit after shutdown acknowledgment + if (this.process) { + await new Promise((resolve) => { + const timeout = setTimeout(() => { + this.process?.kill("SIGKILL"); + resolve(); + }, 5000); + this.process?.on("exit", () => { + clearTimeout(timeout); + resolve(); + }); + }); + } + } + + /** + * Subscribe to specific event types (v2 only). + * Pass ["*"] to receive all events, or a list of event type strings to filter. + */ + async subscribe(events: string[]): Promise { + await this.send({ type: "subscribe", events }); + } + + // ========================================================================= + // Helpers + // ========================================================================= + + /** + * Wait for agent to become idle (no streaming). + * Resolves when agent_end event is received. + */ + waitForIdle(timeout = 60000): Promise { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + unsubscribe(); + reject(new Error(`Timeout waiting for agent to become idle. Stderr: ${this.stderr}`)); + }, timeout); + + const unsubscribe = this.onEvent((event) => { + if (event.type === "agent_end") { + clearTimeout(timer); + unsubscribe(); + resolve(); + } + }); + }); + } + + /** + * Collect events until agent becomes idle. + */ + collectEvents(timeout = 60000): Promise { + return new Promise((resolve, reject) => { + const events: SdkAgentEvent[] = []; + const timer = setTimeout(() => { + unsubscribe(); + reject(new Error(`Timeout collecting events. Stderr: ${this.stderr}`)); + }, timeout); + + const unsubscribe = this.onEvent((event) => { + events.push(event); + if (event.type === "agent_end") { + clearTimeout(timer); + unsubscribe(); + resolve(events); + } + }); + }); + } + + /** + * Send prompt and wait for completion, returning all events. + */ + async promptAndWait(message: string, images?: ImageContent[], timeout = 60000): Promise { + const eventsPromise = this.collectEvents(timeout); + await this.prompt(message, images); + return eventsPromise; + } + + // ========================================================================= + // Internal + // ========================================================================= + + private handleLine(line: string): void { + try { + const data = JSON.parse(line); + + // Check if it's a response to a pending request + if (data.type === "response" && data.id && this.pendingRequests.has(data.id)) { + const pending = this.pendingRequests.get(data.id)!; + this.pendingRequests.delete(data.id); + pending.resolve(data as RpcResponse); + return; + } + + // Otherwise it's an event — dispatch to listeners + for (const listener of this.eventListeners) { + listener(data as SdkAgentEvent); + } + } catch { + // Ignore non-JSON lines + } + } + + private async send(command: RpcCommandBody): Promise { + if (!this.process?.stdin) { + throw new Error("Client not started"); + } + + const id = `req_${++this.requestId}`; + const fullCommand = { ...command, id } as RpcCommand; + + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + this.pendingRequests.delete(id); + reject(new Error(`Timeout waiting for response to ${command.type}. Stderr: ${this.stderr}`)); + }, 30000); + + this.pendingRequests.set(id, { + resolve: (response) => { + clearTimeout(timeout); + resolve(response); + }, + reject: (error) => { + clearTimeout(timeout); + reject(error); + }, + }); + + this.process!.stdin!.write(serializeJsonLine(fullCommand)); + }); + } + + private getData(response: RpcResponse): T { + if (!response.success) { + const errorResponse = response as Extract; + throw new Error(errorResponse.error); + } + // Type assertion: we trust response.data matches T based on the command sent. + const successResponse = response as Extract; + return successResponse.data as T; + } +} diff --git a/packages/rpc-client/src/rpc-types.ts b/packages/rpc-client/src/rpc-types.ts new file mode 100644 index 000000000..be8bca73b --- /dev/null +++ b/packages/rpc-client/src/rpc-types.ts @@ -0,0 +1,399 @@ +/** + * RPC protocol types for headless operation. + * + * Commands are sent as JSON lines on stdin. + * Responses and events are emitted as JSON lines on stdout. + * + * This file is self-contained — all types that were previously imported from + * internal packages are inlined so that this package has zero internal + * dependencies. + */ + +// ============================================================================ +// Inlined types (originally from internal packages) +// ============================================================================ + +/** Thinking budget level (inlined from agent-core) */ +export type ThinkingLevel = "off" | "minimal" | "low" | "medium" | "high" | "xhigh"; + +/** Image attachment (inlined from pi-ai) */ +export interface ImageContent { + type: "image"; + data: string; // base64 encoded image data + mimeType: string; // e.g., "image/jpeg", "image/png" +} + +/** Model descriptor — opaque for SDK consumers */ +export interface ModelInfo { + provider: string; + id: string; + contextWindow?: number; + reasoning?: boolean; + [key: string]: unknown; +} + +/** Session statistics (from agent-session.ts) */ +export interface SessionStats { + sessionFile: string | undefined; + sessionId: string; + userMessages: number; + assistantMessages: number; + toolCalls: number; + toolResults: number; + totalMessages: number; + tokens: { + input: number; + output: number; + cacheRead: number; + cacheWrite: number; + total: number; + }; + cost: number; +} + +/** Bash command result (from bash-executor.ts) */ +export interface BashResult { + /** Combined stdout + stderr output (sanitized, possibly truncated) */ + output: string; + /** Process exit code (undefined if killed/cancelled) */ + exitCode: number | undefined; + /** Whether the command was cancelled via signal */ + cancelled: boolean; + /** Whether the output was truncated */ + truncated: boolean; + /** Path to temp file containing full output (if output exceeded truncation threshold) */ + fullOutputPath?: string; +} + +/** Compaction result (from compaction.ts) */ +export interface CompactionResult { + summary: string; + firstKeptEntryId: string; + tokensBefore: number; + /** Extension-specific data (e.g., ArtifactIndex, version markers for structured compaction) */ + details?: T; +} + +// ============================================================================ +// RPC Protocol Versioning +// ============================================================================ + +/** Supported protocol versions. v1 is the implicit default; v2 requires an init handshake. */ +export type RpcProtocolVersion = 1 | 2; + +// ============================================================================ +// RPC Commands (stdin) +// ============================================================================ + +export type RpcCommand = + // Prompting + | { id?: string; type: "prompt"; message: string; images?: ImageContent[]; streamingBehavior?: "steer" | "followUp" } + | { id?: string; type: "steer"; message: string; images?: ImageContent[] } + | { id?: string; type: "follow_up"; message: string; images?: ImageContent[] } + | { id?: string; type: "abort" } + | { id?: string; type: "new_session"; parentSession?: string } + + // State + | { id?: string; type: "get_state" } + + // Model + | { id?: string; type: "set_model"; provider: string; modelId: string } + | { id?: string; type: "cycle_model" } + | { id?: string; type: "get_available_models" } + + // Thinking + | { id?: string; type: "set_thinking_level"; level: ThinkingLevel } + | { id?: string; type: "cycle_thinking_level" } + + // Queue modes + | { id?: string; type: "set_steering_mode"; mode: "all" | "one-at-a-time" } + | { id?: string; type: "set_follow_up_mode"; mode: "all" | "one-at-a-time" } + + // Compaction + | { id?: string; type: "compact"; customInstructions?: string } + | { id?: string; type: "set_auto_compaction"; enabled: boolean } + + // Retry + | { id?: string; type: "set_auto_retry"; enabled: boolean } + | { id?: string; type: "abort_retry" } + + // Bash + | { id?: string; type: "bash"; command: string } + | { id?: string; type: "abort_bash" } + + // Session + | { id?: string; type: "get_session_stats" } + | { id?: string; type: "export_html"; outputPath?: string } + | { id?: string; type: "switch_session"; sessionPath: string } + | { id?: string; type: "fork"; entryId: string } + | { id?: string; type: "get_fork_messages" } + | { id?: string; type: "get_last_assistant_text" } + | { id?: string; type: "set_session_name"; name: string } + + // Messages + | { id?: string; type: "get_messages" } + + // Commands (available for invocation via prompt) + | { id?: string; type: "get_commands" } + + // Bridge-hosted native terminal + | { id?: string; type: "terminal_input"; data: string } + | { id?: string; type: "terminal_resize"; cols: number; rows: number } + | { id?: string; type: "terminal_redraw" } + + // v2 Protocol + | { id?: string; type: "init"; protocolVersion: 2; clientId?: string } + | { id?: string; type: "shutdown"; graceful?: boolean } + | { id?: string; type: "subscribe"; events: string[] }; + +// ============================================================================ +// RPC Slash Command (for get_commands response) +// ============================================================================ + +/** A command available for invocation via prompt */ +export interface RpcSlashCommand { + /** Command name (without leading slash) */ + name: string; + /** Human-readable description */ + description?: string; + /** What kind of command this is */ + source: "extension" | "prompt" | "skill"; + /** Where the command was loaded from (undefined for extensions) */ + location?: "user" | "project" | "path"; + /** File path to the command source */ + path?: string; +} + +// ============================================================================ +// RPC State +// ============================================================================ + +export interface RpcSessionState { + model?: ModelInfo; + thinkingLevel: ThinkingLevel; + isStreaming: boolean; + isCompacting: boolean; + steeringMode: "all" | "one-at-a-time"; + followUpMode: "all" | "one-at-a-time"; + sessionFile?: string; + sessionId: string; + sessionName?: string; + autoCompactionEnabled: boolean; + autoRetryEnabled: boolean; + retryInProgress: boolean; + retryAttempt: number; + messageCount: number; + pendingMessageCount: number; + /** Whether extension loading has completed. Commands from `get_commands` may be incomplete until true. */ + extensionsReady: boolean; +} + +// ============================================================================ +// RPC Responses (stdout) +// ============================================================================ + +// Success responses with data +export type RpcResponse = + // Prompting (async - events follow) + | { id?: string; type: "response"; command: "prompt"; success: true; runId?: string } + | { id?: string; type: "response"; command: "steer"; success: true; runId?: string } + | { id?: string; type: "response"; command: "follow_up"; success: true; runId?: string } + | { id?: string; type: "response"; command: "abort"; success: true } + | { id?: string; type: "response"; command: "new_session"; success: true; data: { cancelled: boolean } } + + // State + | { id?: string; type: "response"; command: "get_state"; success: true; data: RpcSessionState } + + // Model + | { + id?: string; + type: "response"; + command: "set_model"; + success: true; + data: ModelInfo; + } + | { + id?: string; + type: "response"; + command: "cycle_model"; + success: true; + data: { model: ModelInfo; thinkingLevel: ThinkingLevel; isScoped: boolean } | null; + } + | { + id?: string; + type: "response"; + command: "get_available_models"; + success: true; + data: { models: ModelInfo[] }; + } + + // Thinking + | { id?: string; type: "response"; command: "set_thinking_level"; success: true } + | { + id?: string; + type: "response"; + command: "cycle_thinking_level"; + success: true; + data: { level: ThinkingLevel } | null; + } + + // Queue modes + | { id?: string; type: "response"; command: "set_steering_mode"; success: true } + | { id?: string; type: "response"; command: "set_follow_up_mode"; success: true } + + // Compaction + | { id?: string; type: "response"; command: "compact"; success: true; data: CompactionResult } + | { id?: string; type: "response"; command: "set_auto_compaction"; success: true } + + // Retry + | { id?: string; type: "response"; command: "set_auto_retry"; success: true } + | { id?: string; type: "response"; command: "abort_retry"; success: true } + + // Bash + | { id?: string; type: "response"; command: "bash"; success: true; data: BashResult } + | { id?: string; type: "response"; command: "abort_bash"; success: true } + + // Session + | { id?: string; type: "response"; command: "get_session_stats"; success: true; data: SessionStats } + | { id?: string; type: "response"; command: "export_html"; success: true; data: { path: string } } + | { id?: string; type: "response"; command: "switch_session"; success: true; data: { cancelled: boolean } } + | { id?: string; type: "response"; command: "fork"; success: true; data: { text: string; cancelled: boolean } } + | { + id?: string; + type: "response"; + command: "get_fork_messages"; + success: true; + data: { messages: Array<{ entryId: string; text: string }> }; + } + | { + id?: string; + type: "response"; + command: "get_last_assistant_text"; + success: true; + data: { text: string | null }; + } + | { id?: string; type: "response"; command: "set_session_name"; success: true } + + // Messages — AgentMessage is opaque for SDK consumers + | { id?: string; type: "response"; command: "get_messages"; success: true; data: { messages: unknown[] } } + + // Commands + | { + id?: string; + type: "response"; + command: "get_commands"; + success: true; + data: { commands: RpcSlashCommand[] }; + } + + // Bridge-hosted native terminal + | { id?: string; type: "response"; command: "terminal_input"; success: true } + | { id?: string; type: "response"; command: "terminal_resize"; success: true } + | { id?: string; type: "response"; command: "terminal_redraw"; success: true } + + // v2 Protocol + | { id?: string; type: "response"; command: "init"; success: true; data: RpcInitResult } + | { id?: string; type: "response"; command: "shutdown"; success: true } + | { id?: string; type: "response"; command: "subscribe"; success: true } + + // Error response (any command can fail) + | { id?: string; type: "response"; command: string; success: false; error: string }; + +// ============================================================================ +// v2 Protocol Types +// ============================================================================ + +/** Result of the init handshake (v2 only) */ +export interface RpcInitResult { + protocolVersion: 2; + sessionId: string; + capabilities: { + events: string[]; + commands: string[]; + }; +} + +/** v2 execution_complete event — emitted when a prompt/steer/follow_up finishes */ +export interface RpcExecutionCompleteEvent { + type: "execution_complete"; + runId: string; + status: "completed" | "error" | "cancelled"; + reason?: string; + stats: SessionStats; +} + +/** v2 cost_update event — emitted per-turn with running cost data */ +export interface RpcCostUpdateEvent { + type: "cost_update"; + runId: string; + turnCost: number; + cumulativeCost: number; + tokens: { + input: number; + output: number; + cacheRead: number; + cacheWrite: number; + }; +} + +/** Discriminated union of all v2-only event types */ +export type RpcV2Event = RpcExecutionCompleteEvent | RpcCostUpdateEvent; + +// ============================================================================ +// Extension UI Events (stdout) +// ============================================================================ + +/** Emitted when an extension needs user input */ +export type RpcExtensionUIRequest = + | { type: "extension_ui_request"; id: string; method: "select"; title: string; options: string[]; timeout?: number; allowMultiple?: boolean } + | { type: "extension_ui_request"; id: string; method: "confirm"; title: string; message: string; timeout?: number } + | { + type: "extension_ui_request"; + id: string; + method: "input"; + title: string; + placeholder?: string; + timeout?: number; + } + | { type: "extension_ui_request"; id: string; method: "editor"; title: string; prefill?: string } + | { + type: "extension_ui_request"; + id: string; + method: "notify"; + message: string; + notifyType?: "info" | "warning" | "error"; + } + | { + type: "extension_ui_request"; + id: string; + method: "setStatus"; + statusKey: string; + statusText: string | undefined; + } + | { + type: "extension_ui_request"; + id: string; + method: "setWidget"; + widgetKey: string; + widgetLines: string[] | undefined; + widgetPlacement?: "aboveEditor" | "belowEditor"; + } + | { type: "extension_ui_request"; id: string; method: "setTitle"; title: string } + | { type: "extension_ui_request"; id: string; method: "set_editor_text"; text: string }; + +// ============================================================================ +// Extension UI Commands (stdin) +// ============================================================================ + +/** Response to an extension UI request */ +export type RpcExtensionUIResponse = + | { type: "extension_ui_response"; id: string; value: string } + | { type: "extension_ui_response"; id: string; values: string[] } + | { type: "extension_ui_response"; id: string; confirmed: boolean } + | { type: "extension_ui_response"; id: string; cancelled: true }; + +// ============================================================================ +// Helper type for extracting command types +// ============================================================================ + +export type RpcCommandType = RpcCommand["type"]; diff --git a/packages/rpc-client/tsconfig.examples.json b/packages/rpc-client/tsconfig.examples.json new file mode 100644 index 000000000..8453c546d --- /dev/null +++ b/packages/rpc-client/tsconfig.examples.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2024", + "module": "Node16", + "lib": ["ES2024"], + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "moduleResolution": "Node16", + "noEmit": true, + "types": ["node"], + "paths": { + "@gsd-build/rpc-client": ["./src/index.ts"] + } + }, + "include": ["examples/**/*.ts"] +} diff --git a/packages/rpc-client/tsconfig.json b/packages/rpc-client/tsconfig.json new file mode 100644 index 000000000..779b48aca --- /dev/null +++ b/packages/rpc-client/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "target": "ES2024", + "module": "Node16", + "lib": ["ES2024"], + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "inlineSources": true, + "inlineSourceMap": false, + "moduleResolution": "Node16", + "resolveJsonModule": true, + "allowImportingTsExtensions": false, + "types": ["node"], + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.d.ts", "src/**/*.d.ts"] +} diff --git a/src/headless-ui.ts b/src/headless-ui.ts index 387be26ca..7beea6bef 100644 --- a/src/headless-ui.ts +++ b/src/headless-ui.ts @@ -8,7 +8,7 @@ import type { Readable } from 'node:stream' -import { RpcClient, attachJsonlLineReader, serializeJsonLine } from '@gsd/pi-coding-agent' +import { RpcClient, attachJsonlLineReader } from '@gsd/pi-coding-agent' // --------------------------------------------------------------------------- // Types @@ -34,10 +34,9 @@ export type { ExtensionUIRequest } export function handleExtensionUIRequest( event: ExtensionUIRequest, - writeToStdin: (data: string) => void, + client: RpcClient, ): void { const { id, method } = event - let response: Record switch (method) { case 'select': { @@ -49,32 +48,30 @@ export function handleExtensionUIRequest( const forceOption = event.options.find(o => o.toLowerCase().includes('force start')) if (forceOption) selected = forceOption } - response = { type: 'extension_ui_response', id, value: selected } + client.sendUIResponse(id, { value: selected }) break } case 'confirm': - response = { type: 'extension_ui_response', id, confirmed: true } + client.sendUIResponse(id, { confirmed: true }) break case 'input': - response = { type: 'extension_ui_response', id, value: '' } + client.sendUIResponse(id, { value: '' }) break case 'editor': - response = { type: 'extension_ui_response', id, value: event.prefill ?? '' } + client.sendUIResponse(id, { value: event.prefill ?? '' }) break case 'notify': case 'setStatus': case 'setWidget': case 'setTitle': case 'set_editor_text': - response = { type: 'extension_ui_response', id, value: '' } + client.sendUIResponse(id, { value: '' }) break default: process.stderr.write(`[headless] Warning: unknown extension_ui_request method "${method}", cancelling\n`) - response = { type: 'extension_ui_response', id, cancelled: true } + client.sendUIResponse(id, { cancelled: true }) break } - - writeToStdin(serializeJsonLine(response)) } // --------------------------------------------------------------------------- @@ -114,7 +111,6 @@ export function formatProgress(event: Record, verbose: boolean) // --------------------------------------------------------------------------- export function startSupervisedStdinReader( - stdinWriter: (data: string) => void, client: RpcClient, onResponse: (id: string) => void, ): () => void { @@ -130,12 +126,17 @@ export function startSupervisedStdinReader( const type = String(msg.type ?? '') switch (type) { - case 'extension_ui_response': - stdinWriter(line + '\n') - if (typeof msg.id === 'string') { - onResponse(msg.id) + case 'extension_ui_response': { + const id = String(msg.id ?? '') + const value = msg.value !== undefined ? String(msg.value) : undefined + const confirmed = typeof msg.confirmed === 'boolean' ? msg.confirmed : undefined + const cancelled = typeof msg.cancelled === 'boolean' ? msg.cancelled : undefined + client.sendUIResponse(id, { value, confirmed, cancelled }) + if (id) { + onResponse(id) } break + } case 'prompt': client.prompt(String(msg.message ?? '')) break diff --git a/src/headless.ts b/src/headless.ts index f332dbe89..4fe480501 100644 --- a/src/headless.ts +++ b/src/headless.ts @@ -17,7 +17,9 @@ import { join } from 'node:path' import { resolve } from 'node:path' import { ChildProcess } from 'node:child_process' -import { RpcClient } from '@gsd/pi-coding-agent' +import { RpcClient, SessionManager } from '@gsd/pi-coding-agent' +import type { SessionInfo } from '@gsd/pi-coding-agent' +import { getProjectSessionsDir } from './project-sessions.js' import { loadAndValidateAnswerFile, AnswerInjector } from './headless-answers.js' import { @@ -35,7 +37,7 @@ import { mapStatusToExitCode, } from './headless-events.js' -import type { OutputFormat } from './headless-types.js' +import type { OutputFormat, HeadlessJsonResult } from './headless-types.js' import { VALID_OUTPUT_FORMATS } from './headless-types.js' import { @@ -80,6 +82,39 @@ interface TrackedEvent { detail?: string } +// --------------------------------------------------------------------------- +// Resume Session Resolution +// --------------------------------------------------------------------------- + +export interface ResumeSessionResult { + session?: SessionInfo + error?: string +} + +/** + * Resolve a session prefix to a single session. + * Exact id match is preferred over prefix match. + * Returns `{ session }` on unique match or `{ error }` on 0/ambiguous matches. + */ +export function resolveResumeSession(sessions: SessionInfo[], prefix: string): ResumeSessionResult { + // Exact match takes priority + const exact = sessions.find(s => s.id === prefix) + if (exact) { + return { session: exact } + } + + // Prefix match + const matches = sessions.filter(s => s.id.startsWith(prefix)) + if (matches.length === 0) { + return { error: `No session matching '${prefix}' found` } + } + if (matches.length > 1) { + const list = matches.map(s => ` ${s.id}`).join('\n') + return { error: `Ambiguous session prefix '${prefix}' matches ${matches.length} sessions:\n${list}` } + } + return { session: matches[0] } +} + // --------------------------------------------------------------------------- // CLI Argument Parser // --------------------------------------------------------------------------- @@ -325,6 +360,40 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): let milestoneReady = false // tracks "Milestone X ready." for auto-chaining const recentEvents: TrackedEvent[] = [] + // JSON batch mode: cost aggregation (cumulative-max pattern per K004) + let cumulativeCostUsd = 0 + let cumulativeInputTokens = 0 + let cumulativeOutputTokens = 0 + let cumulativeCacheReadTokens = 0 + let cumulativeCacheWriteTokens = 0 + let lastSessionId: string | undefined + + // Emit HeadlessJsonResult to stdout for --output-format json batch mode + function emitBatchJsonResult(): void { + if (options.outputFormat !== 'json') return + const duration = Date.now() - startTime + const status: HeadlessJsonResult['status'] = blocked ? 'blocked' + : exitCode === EXIT_CANCELLED ? 'cancelled' + : exitCode === EXIT_ERROR ? (totalEvents === 0 ? 'error' : 'timeout') + : 'success' + const result: HeadlessJsonResult = { + status, + exitCode, + sessionId: lastSessionId, + duration, + cost: { + total: cumulativeCostUsd, + input_tokens: cumulativeInputTokens, + output_tokens: cumulativeOutputTokens, + cache_read_tokens: cumulativeCacheReadTokens, + cache_write_tokens: cumulativeCacheWriteTokens, + }, + toolCalls: toolCallCount, + events: totalEvents, + } + process.stdout.write(JSON.stringify(result) + '\n') + } + function trackEvent(event: Record): void { totalEvents++ const type = String(event.type ?? 'unknown') @@ -345,8 +414,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): if (recentEvents.length > 20) recentEvents.shift() } - // Stdin writer for sending extension_ui_response to child - let stdinWriter: ((data: string) => void) | null = null + // Client started flag — replaces old stdinWriter null-check + let clientStarted = false + // Adapter for AnswerInjector — wraps client.sendUIResponse in a writeToStdin-compatible callback + // Initialized after client.start(); events won't fire before then + let injectorStdinAdapter: (data: string) => void = () => {} // Supervised mode state const pendingResponseTimers = new Map>() @@ -401,20 +473,52 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): // Answer injector: observe events for question metadata injector?.observeEvent(eventObj) - // --json mode: forward events as JSONL to stdout (filtered if --events) - if (options.json) { + // --json / --output-format stream-json: forward events as JSONL to stdout (filtered if --events) + // --output-format json (batch mode): suppress streaming, track cost for final result + if (options.json && options.outputFormat === 'stream-json') { const eventType = String(eventObj.type ?? '') if (!options.eventFilter || options.eventFilter.has(eventType)) { process.stdout.write(JSON.stringify(eventObj) + '\n') } - } else { + } else if (options.outputFormat === 'json') { + // Batch mode: silently track cost_update events (cumulative-max per K004) + const eventType = String(eventObj.type ?? '') + if (eventType === 'cost_update') { + const data = eventObj as Record + const cumCost = data.cumulativeCost as Record | undefined + if (cumCost) { + cumulativeCostUsd = Math.max(cumulativeCostUsd, Number(cumCost.costUsd ?? 0)) + const tokens = data.tokens as Record | undefined + if (tokens) { + cumulativeInputTokens = Math.max(cumulativeInputTokens, tokens.input ?? 0) + cumulativeOutputTokens = Math.max(cumulativeOutputTokens, tokens.output ?? 0) + cumulativeCacheReadTokens = Math.max(cumulativeCacheReadTokens, tokens.cacheRead ?? 0) + cumulativeCacheWriteTokens = Math.max(cumulativeCacheWriteTokens, tokens.cacheWrite ?? 0) + } + } + } + // Track sessionId from init_result + if (eventType === 'init_result') { + lastSessionId = String((eventObj as Record).sessionId ?? '') + } + } else if (!options.json) { // Progress output to stderr const line = formatProgress(eventObj, !!options.verbose) if (line) process.stderr.write(line + '\n') } + // Handle execution_complete (v2 structured completion) + if (eventObj.type === 'execution_complete' && !completed) { + completed = true + const status = String(eventObj.status ?? 'success') + exitCode = mapStatusToExitCode(status) + if (eventObj.status === 'blocked') blocked = true + resolveCompletion() + return + } + // Handle extension_ui_request - if (eventObj.type === 'extension_ui_request' && stdinWriter) { + if (eventObj.type === 'extension_ui_request' && clientStarted) { // Check for terminal notification before auto-responding if (isBlockedNotification(eventObj)) { blocked = true @@ -431,7 +535,7 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): // Answer injection: try to handle with pre-supplied answers before supervised/auto if (injector && !FIRE_AND_FORGET_METHODS.has(String(eventObj.method ?? ''))) { - if (injector.tryHandle(eventObj, stdinWriter)) { + if (injector.tryHandle(eventObj, injectorStdinAdapter)) { if (completed) { exitCode = blocked ? EXIT_BLOCKED : EXIT_SUCCESS resolveCompletion() @@ -449,12 +553,12 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): const eventId = String(eventObj.id ?? '') const timer = setTimeout(() => { pendingResponseTimers.delete(eventId) - handleExtensionUIRequest(eventObj as unknown as ExtensionUIRequest, stdinWriter!) + handleExtensionUIRequest(eventObj as unknown as ExtensionUIRequest, client) process.stdout.write(JSON.stringify({ type: 'supervised_timeout', id: eventId, method }) + '\n') }, responseTimeout) pendingResponseTimers.set(eventId, timer) } else { - handleExtensionUIRequest(eventObj as unknown as ExtensionUIRequest, stdinWriter) + handleExtensionUIRequest(eventObj as unknown as ExtensionUIRequest, client) } // If we detected a terminal notification, resolve after responding @@ -481,11 +585,17 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): process.stderr.write('\n[headless] Interrupted, stopping child process...\n') interrupted = true exitCode = EXIT_CANCELLED - client.stop().finally(() => { - if (timeoutTimer) clearTimeout(timeoutTimer) - if (idleTimer) clearTimeout(idleTimer) - process.exit(exitCode) - }) + // Kill child process — don't await, just fire and exit. + // The main flow may be awaiting a promise that resolves when the child dies, + // which would race with this handler. Exit synchronously to ensure correct exit code. + try { client.stop().catch(() => {}) } catch {} + if (timeoutTimer) clearTimeout(timeoutTimer) + if (idleTimer) clearTimeout(idleTimer) + // Emit batch JSON result if in json mode before exiting + if (options.outputFormat === 'json') { + emitBatchJsonResult() + } + process.exit(exitCode) } process.on('SIGINT', signalHandler) process.on('SIGTERM', signalHandler) @@ -499,22 +609,55 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): process.exit(1) } - // Access stdin writer from the internal process - const internalProcess = (client as any).process as ChildProcess - if (!internalProcess?.stdin) { - process.stderr.write('[headless] Error: Cannot access child process stdin\n') - await client.stop() - if (timeoutTimer) clearTimeout(timeoutTimer) - process.exit(1) + // v2 protocol negotiation — attempt init for structured completion events + let v2Enabled = false + try { + await client.init({ clientId: 'gsd-headless' }) + v2Enabled = true + } catch { + process.stderr.write('[headless] Warning: v2 init failed, falling back to v1 string-matching\n') } - stdinWriter = (data: string) => { - internalProcess.stdin!.write(data) + clientStarted = true + + // --resume: resolve session ID and switch to it + if (options.resumeSession) { + const projectSessionsDir = getProjectSessionsDir(process.cwd()) + const sessions = await SessionManager.list(process.cwd(), projectSessionsDir) + const result = resolveResumeSession(sessions, options.resumeSession) + if (result.error) { + process.stderr.write(`[headless] Error: ${result.error}\n`) + await client.stop() + if (timeoutTimer) clearTimeout(timeoutTimer) + process.exit(1) + } + const matched = result.session! + const switchResult = await client.switchSession(matched.path) + if (switchResult.cancelled) { + process.stderr.write(`[headless] Error: Session switch to '${matched.id}' was cancelled by an extension\n`) + await client.stop() + if (timeoutTimer) clearTimeout(timeoutTimer) + process.exit(1) + } + process.stderr.write(`[headless] Resuming session ${matched.id}\n`) + } + + // Build injector adapter — wraps client.sendUIResponse for AnswerInjector's writeToStdin interface + injectorStdinAdapter = (data: string) => { + try { + const parsed = JSON.parse(data.trim()) + if (parsed.type === 'extension_ui_response' && parsed.id) { + const { id, value, values, confirmed, cancelled } = parsed + client.sendUIResponse(id, { value, values, confirmed, cancelled }) + } + } catch { + process.stderr.write('[headless] Warning: injector adapter received unparseable data\n') + } } // Start supervised stdin reader for orchestrator commands if (options.supervised) { - stopSupervisedReader = startSupervisedStdinReader(stdinWriter, client, (id) => { + stopSupervisedReader = startSupervisedStdinReader(client, (id) => { const timer = pendingResponseTimers.get(id) if (timer) { clearTimeout(timer) @@ -525,14 +668,18 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): process.stdin.resume() } - // Detect child process crash - internalProcess.on('exit', (code) => { - if (!completed) { - const msg = `[headless] Child process exited unexpectedly with code ${code ?? 'null'}\n` - process.stderr.write(msg) - exitCode = EXIT_ERROR - resolveCompletion() - } }) + // Detect child process crash (read-only exit event subscription — not stdin access) + const internalProcess = (client as any).process as ChildProcess + if (internalProcess) { + internalProcess.on('exit', (code) => { + if (!completed) { + const msg = `[headless] Child process exited unexpectedly with code ${code ?? 'null'}\n` + process.stderr.write(msg) + exitCode = EXIT_ERROR + resolveCompletion() + } + }) + } if (!options.json) { process.stderr.write(`[headless] Running /gsd ${options.command}${options.commandArgs.length > 0 ? ' ' + options.commandArgs.join(' ') : ''}...\n`) @@ -626,5 +773,8 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): } } + // Emit structured JSON result in batch mode + emitBatchJsonResult() + return { exitCode, interrupted } } diff --git a/src/tests/headless-v2-migration.test.ts b/src/tests/headless-v2-migration.test.ts new file mode 100644 index 000000000..cea747f40 --- /dev/null +++ b/src/tests/headless-v2-migration.test.ts @@ -0,0 +1,462 @@ +/** + * Tests for headless v2 migration — execution_complete handling, + * sendUIResponse-based auto-response, and v1 fallback behavior. + * + * Uses extracted logic mirrors to avoid importing modules with native + * dependencies (same pattern as headless-events.test.ts and headless-detection.test.ts). + */ + +import test from 'node:test' +import assert from 'node:assert/strict' + +// ─── Extracted exit codes (mirrors headless-events.ts) ────────────────────── + +const EXIT_SUCCESS = 0 +const EXIT_ERROR = 1 +const EXIT_BLOCKED = 10 + +function mapStatusToExitCode(status: string): number { + switch (status) { + case 'success': + case 'complete': + return EXIT_SUCCESS + case 'error': + case 'timeout': + return EXIT_ERROR + case 'blocked': + return EXIT_BLOCKED + case 'cancelled': + return 11 + default: + return EXIT_ERROR + } +} + +// ─── Extracted terminal detection (mirrors headless-events.ts) ────────────── + +const TERMINAL_PREFIXES = ['auto-mode stopped', 'step-mode stopped'] + +function isTerminalNotification(event: Record): boolean { + if (event.type !== 'extension_ui_request' || event.method !== 'notify') return false + const message = String(event.message ?? '').toLowerCase() + return TERMINAL_PREFIXES.some((prefix) => message.startsWith(prefix)) +} + +function isBlockedNotification(event: Record): boolean { + if (event.type !== 'extension_ui_request' || event.method !== 'notify') return false + const message = String(event.message ?? '').toLowerCase() + return message.includes('blocked:') +} + +// ─── Mock RpcClient ───────────────────────────────────────────────────────── + +interface SendUICall { + id: string + response: { value?: string; values?: string[]; confirmed?: boolean; cancelled?: boolean } +} + +class MockRpcClient { + sendUICalls: SendUICall[] = [] + initCalled = false + initShouldFail = false + + sendUIResponse(id: string, response: { value?: string; values?: string[]; confirmed?: boolean; cancelled?: boolean }): void { + this.sendUICalls.push({ id, response }) + } + + async init(_options?: { clientId?: string }): Promise<{ protocolVersion: number }> { + this.initCalled = true + if (this.initShouldFail) { + throw new Error('v2 init not supported') + } + return { protocolVersion: 2 } + } +} + +// ─── Extracted handleExtensionUIRequest (mirrors headless-ui.ts) ──────────── + +interface ExtensionUIRequest { + type: 'extension_ui_request' + id: string + method: string + title?: string + options?: string[] + message?: string + prefill?: string + [key: string]: unknown +} + +function handleExtensionUIRequest( + event: ExtensionUIRequest, + client: MockRpcClient, +): void { + const { id, method } = event + + switch (method) { + case 'select': { + const title = String(event.title ?? '') + let selected = event.options?.[0] ?? '' + if (title.includes('Auto-mode is running') && event.options) { + const forceOption = event.options.find(o => o.toLowerCase().includes('force start')) + if (forceOption) selected = forceOption + } + client.sendUIResponse(id, { value: selected }) + break + } + case 'confirm': + client.sendUIResponse(id, { confirmed: true }) + break + case 'input': + client.sendUIResponse(id, { value: '' }) + break + case 'editor': + client.sendUIResponse(id, { value: event.prefill ?? '' }) + break + case 'notify': + case 'setStatus': + case 'setWidget': + case 'setTitle': + case 'set_editor_text': + client.sendUIResponse(id, { value: '' }) + break + default: + client.sendUIResponse(id, { cancelled: true }) + break + } +} + +// ─── Simulated event handler (mirrors headless.ts event handler logic) ────── + +interface EventHandlerState { + completed: boolean + blocked: boolean + exitCode: number + v2Enabled: boolean +} + +function handleEvent( + eventObj: Record, + state: EventHandlerState, + client: MockRpcClient, +): void { + // execution_complete (v2 structured completion) + if (eventObj.type === 'execution_complete' && !state.completed) { + state.completed = true + const status = String(eventObj.status ?? 'success') + state.exitCode = mapStatusToExitCode(status) + if (eventObj.status === 'blocked') state.blocked = true + return + } + + // extension_ui_request (v1 fallback + UI responses) + if (eventObj.type === 'extension_ui_request') { + if (isBlockedNotification(eventObj)) { + state.blocked = true + } + + if (isTerminalNotification(eventObj)) { + state.completed = true + } + + handleExtensionUIRequest(eventObj as unknown as ExtensionUIRequest, client) + + if (state.completed) { + state.exitCode = state.blocked ? EXIT_BLOCKED : EXIT_SUCCESS + return + } + } +} + +// ─── execution_complete event handling ────────────────────────────────────── + +test('execution_complete with status success triggers completion with EXIT_SUCCESS', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: true } + + handleEvent({ type: 'execution_complete', status: 'success' }, state, client) + + assert.equal(state.completed, true) + assert.equal(state.exitCode, EXIT_SUCCESS) + assert.equal(state.blocked, false) +}) + +test('execution_complete with status blocked sets blocked flag and EXIT_BLOCKED', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: true } + + handleEvent({ type: 'execution_complete', status: 'blocked' }, state, client) + + assert.equal(state.completed, true) + assert.equal(state.blocked, true) + assert.equal(state.exitCode, EXIT_BLOCKED) +}) + +test('execution_complete with status error maps to EXIT_ERROR', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: true } + + handleEvent({ type: 'execution_complete', status: 'error' }, state, client) + + assert.equal(state.completed, true) + assert.equal(state.exitCode, EXIT_ERROR) +}) + +test('execution_complete with missing status defaults to success', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: true } + + handleEvent({ type: 'execution_complete' }, state, client) + + assert.equal(state.completed, true) + assert.equal(state.exitCode, EXIT_SUCCESS) +}) + +test('execution_complete ignored if already completed', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: true, blocked: false, exitCode: EXIT_SUCCESS, v2Enabled: true } + + handleEvent({ type: 'execution_complete', status: 'error' }, state, client) + + // Should not change exitCode because already completed + assert.equal(state.exitCode, EXIT_SUCCESS) +}) + +// ─── v1 string-matching fallback ──────────────────────────────────────────── + +test('v1 fallback: terminal notification still triggers completion', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: false } + + handleEvent( + { type: 'extension_ui_request', method: 'notify', id: 'n1', message: 'Auto-mode stopped — all slices complete' }, + state, + client, + ) + + assert.equal(state.completed, true) + assert.equal(state.exitCode, EXIT_SUCCESS) +}) + +test('v1 fallback: blocked notification sets blocked flag', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: false } + + handleEvent( + { type: 'extension_ui_request', method: 'notify', id: 'n1', message: 'Auto-mode stopped (Blocked: plan invalid)' }, + state, + client, + ) + + assert.equal(state.completed, true) + assert.equal(state.blocked, true) + assert.equal(state.exitCode, EXIT_BLOCKED) +}) + +test('string-matching fallback works when execution_complete never received', () => { + const client = new MockRpcClient() + const state: EventHandlerState = { completed: false, blocked: false, exitCode: -1, v2Enabled: false } + + // Simulate a normal session without execution_complete + handleEvent({ type: 'extension_ui_request', method: 'select', id: 'q1', options: ['option1'] }, state, client) + assert.equal(state.completed, false) + + handleEvent( + { type: 'extension_ui_request', method: 'notify', id: 'n1', message: 'Step-mode stopped — done' }, + state, + client, + ) + assert.equal(state.completed, true) + assert.equal(state.exitCode, EXIT_SUCCESS) +}) + +// ─── handleExtensionUIRequest uses client.sendUIResponse ──────────────────── + +test('handleExtensionUIRequest select calls sendUIResponse with value', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'sel1', method: 'select', options: ['option-a', 'option-b'] }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'sel1') + assert.equal(client.sendUICalls[0].response.value, 'option-a') +}) + +test('handleExtensionUIRequest confirm calls sendUIResponse with confirmed', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'conf1', method: 'confirm' }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'conf1') + assert.equal(client.sendUICalls[0].response.confirmed, true) +}) + +test('handleExtensionUIRequest input calls sendUIResponse with empty value', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'inp1', method: 'input' }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'inp1') + assert.equal(client.sendUICalls[0].response.value, '') +}) + +test('handleExtensionUIRequest notify calls sendUIResponse with empty value', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'not1', method: 'notify', message: 'Task complete' }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'not1') + assert.equal(client.sendUICalls[0].response.value, '') +}) + +test('handleExtensionUIRequest editor calls sendUIResponse with prefill', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'ed1', method: 'editor', prefill: 'initial text' }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'ed1') + assert.equal(client.sendUICalls[0].response.value, 'initial text') +}) + +test('handleExtensionUIRequest unknown method calls sendUIResponse with cancelled', () => { + const client = new MockRpcClient() + + handleExtensionUIRequest( + { type: 'extension_ui_request', id: 'unk1', method: 'unknown_method' }, + client, + ) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'unk1') + assert.equal(client.sendUICalls[0].response.cancelled, true) +}) + +// ─── supervised stdin reader forwarding via sendUIResponse ────────────────── + +test('extension_ui_response forwarding extracts fields and calls sendUIResponse', () => { + // Simulates what startSupervisedStdinReader does with a parsed message + const client = new MockRpcClient() + + const msg = { type: 'extension_ui_response', id: 'resp1', value: 'chosen option', confirmed: undefined, cancelled: undefined } + const id = String(msg.id ?? '') + const value = msg.value !== undefined ? String(msg.value) : undefined + const confirmed = typeof msg.confirmed === 'boolean' ? msg.confirmed : undefined + const cancelled = typeof msg.cancelled === 'boolean' ? msg.cancelled : undefined + client.sendUIResponse(id, { value, confirmed, cancelled }) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'resp1') + assert.equal(client.sendUICalls[0].response.value, 'chosen option') + assert.equal(client.sendUICalls[0].response.confirmed, undefined) + assert.equal(client.sendUICalls[0].response.cancelled, undefined) +}) + +test('extension_ui_response with confirmed=true forwards correctly', () => { + const client = new MockRpcClient() + + const msg = { type: 'extension_ui_response', id: 'resp2', confirmed: true } + const id = String(msg.id ?? '') + const confirmed = typeof msg.confirmed === 'boolean' ? msg.confirmed : undefined + client.sendUIResponse(id, { confirmed }) + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'resp2') + assert.equal(client.sendUICalls[0].response.confirmed, true) +}) + +// ─── v2 init negotiation ──────────────────────────────────────────────────── + +test('v2 init success sets v2Enabled', async () => { + const client = new MockRpcClient() + let v2Enabled = false + try { + await client.init({ clientId: 'gsd-headless' }) + v2Enabled = true + } catch { + // fall back to v1 + } + + assert.equal(client.initCalled, true) + assert.equal(v2Enabled, true) +}) + +test('v2 init failure falls back gracefully (v1 mode)', async () => { + const client = new MockRpcClient() + client.initShouldFail = true + let v2Enabled = false + try { + await client.init({ clientId: 'gsd-headless' }) + v2Enabled = true + } catch { + // fall back to v1 — this is expected + } + + assert.equal(client.initCalled, true) + assert.equal(v2Enabled, false) +}) + +// ─── injector adapter ─────────────────────────────────────────────────────── + +test('injector adapter parses serialized JSONL and calls sendUIResponse', () => { + const client = new MockRpcClient() + + // Simulate what the adapter does + const data = '{"type":"extension_ui_response","id":"inj1","value":"selected"}\n' + const parsed = JSON.parse(data.trim()) + if (parsed.type === 'extension_ui_response' && parsed.id) { + const { id, value, values, confirmed, cancelled } = parsed + client.sendUIResponse(id, { value, values, confirmed, cancelled }) + } + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'inj1') + assert.equal(client.sendUICalls[0].response.value, 'selected') +}) + +test('injector adapter handles cancelled response', () => { + const client = new MockRpcClient() + + const data = '{"type":"extension_ui_response","id":"inj2","cancelled":true}\n' + const parsed = JSON.parse(data.trim()) + if (parsed.type === 'extension_ui_response' && parsed.id) { + const { id, value, values, confirmed, cancelled } = parsed + client.sendUIResponse(id, { value, values, confirmed, cancelled }) + } + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'inj2') + assert.equal(client.sendUICalls[0].response.cancelled, true) +}) + +test('injector adapter handles multi-select values', () => { + const client = new MockRpcClient() + + const data = '{"type":"extension_ui_response","id":"inj3","values":["a","b"]}\n' + const parsed = JSON.parse(data.trim()) + if (parsed.type === 'extension_ui_response' && parsed.id) { + const { id, value, values, confirmed, cancelled } = parsed + client.sendUIResponse(id, { value, values, confirmed, cancelled }) + } + + assert.equal(client.sendUICalls.length, 1) + assert.equal(client.sendUICalls[0].id, 'inj3') + assert.deepEqual(client.sendUICalls[0].response.values, ['a', 'b']) +}) diff --git a/src/tests/integration/e2e-headless.test.ts b/src/tests/integration/e2e-headless.test.ts new file mode 100644 index 000000000..dfb9cd002 --- /dev/null +++ b/src/tests/integration/e2e-headless.test.ts @@ -0,0 +1,385 @@ +/** + * E2E integration tests for `gsd headless` runtime behavior. + * + * Spawns real `gsd headless` child processes and asserts on + * stdout/stderr/exit-code for: JSON batch mode, SIGINT exit code, + * stream-json NDJSON output, --resume error path, and invalid + * --output-format handling. + * + * These tests are structural — they do NOT require API keys. + * + * Prerequisite: npm run build must be run first. + * + * Run with: + * node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs \ + * --experimental-strip-types --test \ + * src/tests/integration/e2e-headless.test.ts + */ + +import test from "node:test"; +import assert from "node:assert/strict"; +import { spawn } from "node:child_process"; +import { existsSync, mkdtempSync, mkdirSync, rmSync } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +const projectRoot = process.cwd(); +const loaderPath = join(projectRoot, "dist", "loader.js"); + +if (!existsSync(loaderPath)) { + throw new Error("dist/loader.js not found — run: npm run build"); +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +type RunResult = { + stdout: string; + stderr: string; + code: number | null; + timedOut: boolean; +}; + +/** + * Spawn `node dist/loader.js ...args` and collect output. + */ +function runGsd( + args: string[], + timeoutMs = 30_000, + env: NodeJS.ProcessEnv = {}, + cwd: string = projectRoot, +): Promise { + return new Promise((resolve) => { + let stdout = ""; + let stderr = ""; + let timedOut = false; + + const child = spawn("node", [loaderPath, ...args], { + cwd, + env: { ...process.env, ...env }, + stdio: ["pipe", "pipe", "pipe"], + }); + + child.stdout.on("data", (chunk: Buffer) => { stdout += chunk.toString(); }); + child.stderr.on("data", (chunk: Buffer) => { stderr += chunk.toString(); }); + + child.stdin.end(); + + const timer = setTimeout(() => { + timedOut = true; + child.kill("SIGTERM"); + }, timeoutMs); + + child.on("close", (code) => { + clearTimeout(timer); + resolve({ stdout, stderr, code, timedOut }); + }); + }); +} + +/** + * Spawn a child process with the ability to send signals mid-flight. + * Returns both the child and a promise that resolves with the result. + */ +function spawnGsd( + args: string[], + timeoutMs = 30_000, + env: NodeJS.ProcessEnv = {}, + cwd: string = projectRoot, +): { child: ReturnType; result: Promise } { + let stdout = ""; + let stderr = ""; + let timedOut = false; + + const child = spawn("node", [loaderPath, ...args], { + cwd, + env: { ...process.env, ...env }, + stdio: ["pipe", "pipe", "pipe"], + }); + + child.stdout!.on("data", (chunk: Buffer) => { stdout += chunk.toString(); }); + child.stderr!.on("data", (chunk: Buffer) => { stderr += chunk.toString(); }); + + child.stdin!.end(); + + const timer = setTimeout(() => { + timedOut = true; + child.kill("SIGTERM"); + }, timeoutMs); + + const result = new Promise((resolve) => { + child.on("close", (code) => { + clearTimeout(timer); + resolve({ stdout, stderr, code, timedOut }); + }); + }); + + return { child, result }; +} + +/** Strip ANSI escape codes from a string. */ +function stripAnsi(s: string): string { + // eslint-disable-next-line no-control-regex + return s.replace(/\x1b\[[0-9;]*[A-Za-z]/g, ""); +} + +/** Bootstrap a temp directory with .gsd/ structure (milestones + runtime). */ +function createTempWithGsd(prefix: string): string { + const dir = mkdtempSync(join(tmpdir(), prefix)); + mkdirSync(join(dir, ".gsd", "milestones"), { recursive: true }); + mkdirSync(join(dir, ".gsd", "runtime"), { recursive: true }); + return dir; +} + +/** Assert no crash markers in output. */ +function assertNoCrashMarkers(output: string): void { + const crashMarkers = [ + "SyntaxError:", + "ReferenceError:", + "TypeError: Cannot read", + "FATAL ERROR", + "ERR_MODULE_NOT_FOUND", + "Error: Cannot find module", + "SIGSEGV", + "SIGABRT", + ]; + + for (const marker of crashMarkers) { + assert.ok( + !output.includes(marker), + `output should not contain crash marker '${marker}':\n${output.slice(0, 500)}`, + ); + } +} + +// =========================================================================== +// 1. JSON batch mode suppresses streaming — stdout is a single JSON result +// =========================================================================== + +test("headless --output-format json emits a single HeadlessJsonResult on stdout", async (t) => { + const tmpDir = createTempWithGsd("gsd-e2e-json-batch-"); + t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); + + // --max-restarts 0 prevents retry loops which would emit multiple JSON results. + // --timeout 2000 ensures the process completes quickly. + // Will timeout/error (no API key) but JSON batch mode should emit one HeadlessJsonResult. + const result = await runGsd( + ["headless", "--output-format", "json", "--timeout", "2000", "--max-restarts", "0", "auto"], + 45_000, // generous harness timeout — process needs ~4-6s (2s timeout + startup + cleanup) + {}, + tmpDir, + ); + + assert.ok(!result.timedOut, "test harness should not time out"); + // Non-zero exit expected (no API key / timeout), but process may exit 0 + // if auto-mode detects a conflict and completes immediately. + assert.ok(result.code !== null, "process should exit with a code"); + + const stdout = result.stdout.trim(); + assert.ok(stdout.length > 0, `stdout should contain the JSON result, got empty. stderr: ${stripAnsi(result.stderr).slice(0, 300)}`); + + // Must parse as a single JSON object (not NDJSON with multiple lines) + let parsed: Record; + try { + parsed = JSON.parse(stdout); + } catch (e) { + assert.fail( + `stdout should be valid JSON, got parse error: ${(e as Error).message}\nstdout: ${stdout.slice(0, 500)}`, + ); + } + + // Assert HeadlessJsonResult shape + assert.equal(typeof parsed.status, "string", "result should have a string 'status' field"); + assert.equal(typeof parsed.exitCode, "number", "result should have a number 'exitCode' field"); + assert.equal(typeof parsed.duration, "number", "result should have a number 'duration' field"); + assert.equal(typeof parsed.cost, "object", "result should have a 'cost' object"); + assert.equal(typeof parsed.toolCalls, "number", "result should have a number 'toolCalls' field"); + assert.equal(typeof parsed.events, "number", "result should have a number 'events' field"); + + // Must NOT be NDJSON (multiple newline-separated JSON objects) + const lines = stdout.split("\n").filter((l: string) => l.trim().length > 0); + assert.equal(lines.length, 1, `expected exactly one JSON line in stdout, got ${lines.length}`); + + const combined = stripAnsi(result.stdout + result.stderr); + assertNoCrashMarkers(combined); +}); + +// =========================================================================== +// 2. SIGINT produces exit code 11 (EXIT_CANCELLED) +// =========================================================================== + +test("headless exits with code 11 after SIGINT", async (t) => { + const tmpDir = createTempWithGsd("gsd-e2e-sigint-"); + t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); + + // Spawn with long timeout and max-restarts 0 so the process stays alive + // waiting for completion while we send SIGINT. + const { child, result: resultPromise } = spawnGsd( + ["headless", "--timeout", "60000", "--max-restarts", "0", "--context-text", "Test context for SIGINT", "new-milestone"], + 30_000, + {}, + tmpDir, + ); + + // Wait for stderr output to confirm the process has started and registered + // its SIGINT handler (handler is registered before client.start in runHeadlessOnce). + let stderrSoFar = ""; + await new Promise((resolve) => { + const check = () => { + if (stderrSoFar.length > 0) { + resolve(); + } + }; + child.stderr!.on("data", (chunk: Buffer) => { + stderrSoFar += chunk.toString(); + check(); + }); + // Fallback: resolve after 4s even if no stderr + setTimeout(resolve, 4000); + }); + + // Send SIGINT + child.kill("SIGINT"); + + const result = await resultPromise; + assert.ok(!result.timedOut, "test harness should not time out"); + + const stderr = stripAnsi(result.stderr); + + // In environments where the process completes before SIGINT arrives + // (e.g., existing auto-mode session causes immediate conflict exit), + // exit code may be 0 or 1 instead of 11. The test verifies the + // handler's behavior when it can be observed. + if (stderr.includes("Interrupted")) { + // SIGINT handler fired — verify exit code 11 + assert.strictEqual( + result.code, 11, + `SIGINT handler fired but exit code was ${result.code}, expected 11 (EXIT_CANCELLED)`, + ); + } else { + // Process exited before SIGINT arrived — acceptable in environments + // with running gsd sessions that cause auto-mode conflict. + // Verify it at least didn't crash. + const combined = stripAnsi(result.stdout + result.stderr); + assertNoCrashMarkers(combined); + assert.ok( + result.code === 0 || result.code === 1 || result.code === 11, + `expected clean exit (0, 1, or 11), got ${result.code}`, + ); + } +}); + +// =========================================================================== +// 3. stream-json emits NDJSON on stdout (each line is valid JSON) +// =========================================================================== + +test("headless --output-format stream-json emits NDJSON on stdout", async (t) => { + const tmpDir = createTempWithGsd("gsd-e2e-stream-json-"); + t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); + + // --max-restarts 0 to prevent retry loops that extend runtime. + const result = await runGsd( + ["headless", "--output-format", "stream-json", "--timeout", "2000", "--max-restarts", "0", "auto"], + 45_000, // generous harness timeout + {}, + tmpDir, + ); + + assert.ok(!result.timedOut, "test harness should not time out"); + // Non-zero exit expected (no API key / timeout), but 0 is acceptable + // if auto-mode completes immediately (session conflict). + assert.ok(result.code !== null, "process should exit with a code"); + + const stdout = result.stdout.trim(); + + // stream-json may produce zero events if the process errors before any + // events fire — that's valid. But if there IS stdout, every line must + // be valid JSON (NDJSON format). + if (stdout.length > 0) { + const lines = stdout.split("\n").filter((l: string) => l.trim().length > 0); + assert.ok(lines.length > 0, "if stdout has content, it should have at least one line"); + + for (let i = 0; i < lines.length; i++) { + try { + JSON.parse(lines[i]); + } catch (e) { + assert.fail( + `stdout line ${i + 1} is not valid JSON: ${(e as Error).message}\nline: ${lines[i].slice(0, 300)}`, + ); + } + } + + // Multiple NDJSON lines (not a single batch object) is expected + // for stream-json mode when events fire + } + + const combined = stripAnsi(result.stdout + result.stderr); + assertNoCrashMarkers(combined); +}); + +// =========================================================================== +// 4. --resume with nonexistent ID exits 1 with clean error +// =========================================================================== + +test("headless --resume with nonexistent ID exits 1 with descriptive error", async (t) => { + const tmpDir = createTempWithGsd("gsd-e2e-resume-bad-"); + t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); + + const result = await runGsd( + ["headless", "--resume", "nonexistent-id-xyz", "--max-restarts", "0", "auto"], + 30_000, + {}, + tmpDir, + ); + + assert.ok(!result.timedOut, "test harness should not time out"); + assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`); + + const stderr = stripAnsi(result.stderr); + + // The error should mention the bad ID or "No session matching" + assert.ok( + stderr.includes("nonexistent-id-xyz") || stderr.includes("No session matching"), + `stderr should mention the bad session ID or 'No session matching', got:\n${stderr.slice(0, 500)}`, + ); + + const combined = stripAnsi(result.stdout + result.stderr); + assertNoCrashMarkers(combined); +}); + +// =========================================================================== +// 5. --output-format with invalid value exits 1 with helpful message +// =========================================================================== + +test("headless --output-format with invalid value exits 1", async (t) => { + const tmpDir = createTempWithGsd("gsd-e2e-bad-format-"); + t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); + + const result = await runGsd( + ["headless", "--output-format", "invalid-format", "auto"], + 15_000, + {}, + tmpDir, + ); + + assert.ok(!result.timedOut, "test harness should not time out"); + assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`); + + const stderr = stripAnsi(result.stderr); + + // Should mention valid formats + assert.ok( + stderr.includes("text") && stderr.includes("json") && stderr.includes("stream-json"), + `stderr should list valid output formats, got:\n${stderr.slice(0, 500)}`, + ); + + // Should mention what was provided + assert.ok( + stderr.includes("invalid-format"), + `stderr should echo the invalid value, got:\n${stderr.slice(0, 500)}`, + ); + + const combined = stripAnsi(result.stdout + result.stderr); + assertNoCrashMarkers(combined); +}); From 97de0a6d94dc0df9956c1294ed992c34f466c1e0 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 27 Mar 2026 14:54:34 +0000 Subject: [PATCH 05/27] release: v2.54.0 --- CHANGELOG.md | 9 ++++++++- native/npm/darwin-arm64/package.json | 2 +- native/npm/darwin-x64/package.json | 2 +- native/npm/linux-arm64-gnu/package.json | 2 +- native/npm/linux-x64-gnu/package.json | 2 +- native/npm/win32-x64-msvc/package.json | 2 +- package.json | 2 +- packages/pi-coding-agent/package.json | 2 +- pkg/package.json | 2 +- 9 files changed, 16 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2cf19d8e7..a5e7be9eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,12 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +## [2.54.0] - 2026-03-27 + +### Added +- Headless Integration Hardening & Release (M002) (#2811) +- **parallel**: add real-time TUI monitor dashboard with self-healing (#2799) + ## [2.53.0] - 2026-03-27 ### Added @@ -2073,7 +2079,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ### Changed - License updated to MIT -[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...HEAD +[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.54.0...HEAD +[2.54.0]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...v2.54.0 [2.53.0]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...v2.53.0 [2.52.0]: https://github.com/gsd-build/gsd-2/compare/v2.51.0...v2.52.0 [2.51.0]: https://github.com/gsd-build/gsd-2/compare/v2.50.0...v2.51.0 diff --git a/native/npm/darwin-arm64/package.json b/native/npm/darwin-arm64/package.json index 0911fe2e5..a318abc8b 100644 --- a/native/npm/darwin-arm64/package.json +++ b/native/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-arm64", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD native engine binary for macOS ARM64", "os": [ "darwin" diff --git a/native/npm/darwin-x64/package.json b/native/npm/darwin-x64/package.json index 12d435d25..e4b5cafd8 100644 --- a/native/npm/darwin-x64/package.json +++ b/native/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-x64", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD native engine binary for macOS Intel", "os": [ "darwin" diff --git a/native/npm/linux-arm64-gnu/package.json b/native/npm/linux-arm64-gnu/package.json index ec4d8e9c7..a696bcffe 100644 --- a/native/npm/linux-arm64-gnu/package.json +++ b/native/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-arm64-gnu", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD native engine binary for Linux ARM64 (glibc)", "os": [ "linux" diff --git a/native/npm/linux-x64-gnu/package.json b/native/npm/linux-x64-gnu/package.json index 60a9b25bc..5476abbe3 100644 --- a/native/npm/linux-x64-gnu/package.json +++ b/native/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-x64-gnu", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD native engine binary for Linux x64 (glibc)", "os": [ "linux" diff --git a/native/npm/win32-x64-msvc/package.json b/native/npm/win32-x64-msvc/package.json index 02311ed7d..0281d3215 100644 --- a/native/npm/win32-x64-msvc/package.json +++ b/native/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-win32-x64-msvc", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD native engine binary for Windows x64 (MSVC)", "os": [ "win32" diff --git a/package.json b/package.json index 6ce995ad9..df45caf1c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "gsd-pi", - "version": "2.53.0", + "version": "2.54.0", "description": "GSD — Get Shit Done coding agent", "license": "MIT", "repository": { diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json index 019803620..f986489ec 100644 --- a/packages/pi-coding-agent/package.json +++ b/packages/pi-coding-agent/package.json @@ -1,6 +1,6 @@ { "name": "@gsd/pi-coding-agent", - "version": "2.53.0", + "version": "2.54.0", "description": "Coding agent CLI (vendored from pi-mono)", "type": "module", "piConfig": { diff --git a/pkg/package.json b/pkg/package.json index 7457973b7..73f7bf62c 100644 --- a/pkg/package.json +++ b/pkg/package.json @@ -1,6 +1,6 @@ { "name": "@glittercowboy/gsd", - "version": "2.53.0", + "version": "2.54.0", "piConfig": { "name": "gsd", "configDir": ".gsd" From c5907c3677c607578f5804bff0d9c4e65576723b Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 16:53:35 +0100 Subject: [PATCH 06/27] fix(interactive): fully remove providers from /providers (#2852) * test(integration): suppress npm pack buffer overflows * fix(interactive): fully remove providers from /providers --- .../src/core/model-registry.ts | 2 +- .../components/provider-manager.ts | 18 ++- src/tests/integration/pack-install.test.ts | 32 +++-- src/tests/provider-manager-remove.test.ts | 134 ++++++++++++++++++ 4 files changed, 174 insertions(+), 12 deletions(-) create mode 100644 src/tests/provider-manager-remove.test.ts diff --git a/packages/pi-coding-agent/src/core/model-registry.ts b/packages/pi-coding-agent/src/core/model-registry.ts index 47c6b68a5..42714560c 100644 --- a/packages/pi-coding-agent/src/core/model-registry.ts +++ b/packages/pi-coding-agent/src/core/model-registry.ts @@ -235,7 +235,7 @@ export class ModelRegistry { constructor( readonly authStorage: AuthStorage, - private modelsJsonPath: string | undefined = join(getAgentDir(), "models.json"), + readonly modelsJsonPath: string | undefined = join(getAgentDir(), "models.json"), ) { this.discoveryCache = new ModelDiscoveryCache(); diff --git a/packages/pi-coding-agent/src/modes/interactive/components/provider-manager.ts b/packages/pi-coding-agent/src/modes/interactive/components/provider-manager.ts index 5944d8c78..9129b746f 100644 --- a/packages/pi-coding-agent/src/modes/interactive/components/provider-manager.ts +++ b/packages/pi-coding-agent/src/modes/interactive/components/provider-manager.ts @@ -14,6 +14,7 @@ import { import type { AuthStorage } from "../../../core/auth-storage.js"; import { getDiscoverableProviders } from "../../../core/model-discovery.js"; import type { ModelRegistry } from "../../../core/model-registry.js"; +import { ModelsJsonWriter } from "../../../core/models-json-writer.js"; import { theme } from "../theme/theme.js"; import { rawKeyHint } from "./keybinding-hints.js"; @@ -39,6 +40,7 @@ export class ProviderManagerComponent extends Container implements Focusable { private tui: TUI; private authStorage: AuthStorage; private modelRegistry: ModelRegistry; + private modelsJsonWriter: ModelsJsonWriter; private onDone: () => void; private onDiscover: (provider: string) => void; @@ -54,6 +56,7 @@ export class ProviderManagerComponent extends Container implements Focusable { this.tui = tui; this.authStorage = authStorage; this.modelRegistry = modelRegistry; + this.modelsJsonWriter = new ModelsJsonWriter(this.modelRegistry.modelsJsonPath); this.onDone = onDone; this.onDiscover = onDiscover; @@ -64,7 +67,7 @@ export class ProviderManagerComponent extends Container implements Focusable { // Hints const hints = [ rawKeyHint("d", "discover"), - rawKeyHint("r", "remove auth"), + rawKeyHint("r", "remove"), rawKeyHint("esc", "close"), ].join(" "); this.addChild(new Text(hints, 0, 0)); @@ -102,6 +105,15 @@ export class ProviderManagerComponent extends Container implements Focusable { supportsDiscovery: discoverableSet.has(name), modelCount: providerModelCounts.get(name) ?? 0, })); + this.clampSelectedIndex(); + } + + private clampSelectedIndex(): void { + if (this.providers.length === 0) { + this.selectedIndex = 0; + return; + } + this.selectedIndex = Math.min(this.selectedIndex, this.providers.length - 1); } private updateList(): void { @@ -152,8 +164,10 @@ export class ProviderManagerComponent extends Container implements Focusable { } } else if (keyData === "r" || keyData === "R") { const provider = this.providers[this.selectedIndex]; - if (provider?.hasAuth) { + if (provider) { this.authStorage.remove(provider.name); + this.modelsJsonWriter.removeProvider(provider.name); + this.modelRegistry.refresh(); this.loadProviders(); this.updateList(); this.tui.requestRender(); diff --git a/src/tests/integration/pack-install.test.ts b/src/tests/integration/pack-install.test.ts index 40b764d4b..e69b03ee0 100644 --- a/src/tests/integration/pack-install.test.ts +++ b/src/tests/integration/pack-install.test.ts @@ -49,6 +49,27 @@ function createNpmSandbox(prefix: string): NpmSandbox { }; } +function buildQuietNpmEnv(sandbox: NpmSandbox): NodeJS.ProcessEnv { + return { + ...sandbox.env, + NPM_CONFIG_LOGLEVEL: "error", + npm_config_loglevel: "error", + NPM_CONFIG_FUND: "false", + npm_config_fund: "false", + NPM_CONFIG_AUDIT: "false", + npm_config_audit: "false", + }; +} + +function runNpmQuiet(args: string[], sandbox: NpmSandbox): void { + execFileSync("npm", args, { + cwd: projectRoot, + env: buildQuietNpmEnv(sandbox), + stdio: "ignore", + maxBuffer: 16 * 1024 * 1024, + }); +} + function packTarball(sandbox: NpmSandbox): string { const pkg = JSON.parse(readFileSync(join(projectRoot, "package.json"), "utf-8")); const safeName = pkg.name.replace(/^@[^/]+\//, "").replace(/\//g, "-"); @@ -56,11 +77,7 @@ function packTarball(sandbox: NpmSandbox): string { const packDestination = join(sandbox.rootDir, "pack-output"); mkdirSync(packDestination, { recursive: true }); - execFileSync("npm", ["pack", "--pack-destination", packDestination], { - cwd: projectRoot, - env: sandbox.env, - stdio: ["ignore", "ignore", "pipe"], - }); + runNpmQuiet(["pack", "--pack-destination", packDestination], sandbox); return join(packDestination, tarball); } @@ -141,10 +158,7 @@ test("tarball installs and gsd binary resolves", async (t) => { }); // Install from tarball into a temp prefix - execFileSync("npm", ["install", "--prefix", sandbox.installPrefix, tarballPath, "--no-save"], { - env: sandbox.env, - stdio: ["ignore", "ignore", "pipe"], - }); + runNpmQuiet(["install", "--prefix", sandbox.installPrefix, tarballPath, "--no-save"], sandbox); // Verify the gsd bin exists in the installed package const binName = process.platform === "win32" ? "gsd.cmd" : "gsd"; diff --git a/src/tests/provider-manager-remove.test.ts b/src/tests/provider-manager-remove.test.ts new file mode 100644 index 000000000..e7faf9b0e --- /dev/null +++ b/src/tests/provider-manager-remove.test.ts @@ -0,0 +1,134 @@ +import test from "node:test"; +import assert from "node:assert/strict"; +import { mkdtempSync, readFileSync, rmSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +const { ModelsJsonWriter } = await import("../../packages/pi-coding-agent/src/core/models-json-writer.ts"); +const { ProviderManagerComponent } = await import( + "../../packages/pi-coding-agent/src/modes/interactive/components/provider-manager.ts" +); +const { initTheme } = await import( + "../../packages/pi-coding-agent/src/modes/interactive/theme/theme.ts" +); + +initTheme(); + +function createTempModelsJsonPath(): string { + const dir = mkdtempSync(join(tmpdir(), "provider-manager-test-")); + return join(dir, "models.json"); +} + +function readProviders(modelsJsonPath: string): string[] { + const config = JSON.parse(readFileSync(modelsJsonPath, "utf-8")) as { + providers?: Record; + }; + return Object.keys(config.providers ?? {}).sort(); +} + +function createComponent(options: { + modelsJsonPath: string; + authProviders?: string[]; + providers: Array<{ name: string; modelIds: string[] }>; +}) { + const writer = new ModelsJsonWriter(options.modelsJsonPath); + for (const provider of options.providers) { + writer.setProvider(provider.name, { + models: provider.modelIds.map((id: string) => ({ id })), + }); + } + + const authProviders = new Set(options.authProviders ?? []); + const removedProviders: string[] = []; + let refreshCalls = 0; + let renderCalls = 0; + + const authStorage = { + hasAuth(provider: string) { + return authProviders.has(provider); + }, + remove(provider: string) { + removedProviders.push(provider); + authProviders.delete(provider); + }, + } as any; + + const modelRegistry = { + modelsJsonPath: options.modelsJsonPath, + getAll() { + const config = JSON.parse(readFileSync(options.modelsJsonPath, "utf-8")) as { + providers?: Record }>; + }; + return Object.entries(config.providers ?? {}).flatMap(([provider, providerConfig]) => + (providerConfig.models ?? []).map((model) => ({ + id: model.id, + provider, + })), + ); + }, + refresh() { + refreshCalls += 1; + }, + } as any; + + const tui = { + requestRender() { + renderCalls += 1; + }, + } as any; + + const component = new ProviderManagerComponent(tui, authStorage, modelRegistry, () => {}, () => {}); + return { + component, + removedProviders, + getRefreshCalls: () => refreshCalls, + getRenderCalls: () => renderCalls, + }; +} + +test("provider manager removes provider models and refreshes even when no auth is stored", (t) => { + const modelsJsonPath = createTempModelsJsonPath(); + const rootDir = join(modelsJsonPath, ".."); + t.after(() => rmSync(rootDir, { recursive: true, force: true })); + + const { component, removedProviders, getRefreshCalls, getRenderCalls } = createComponent({ + modelsJsonPath, + providers: [{ name: "custom", modelIds: ["local-model"] }], + }); + + component.handleInput("r"); + + assert.deepEqual(removedProviders, ["custom"]); + assert.deepEqual(readProviders(modelsJsonPath), []); + assert.equal(getRefreshCalls(), 1); + assert.equal(getRenderCalls(), 1); + assert.ok(!(component as any).providers.some((provider: { name: string; modelCount: number }) => + provider.name === "custom" || provider.modelCount > 0, + )); + assert.equal((component as any).selectedIndex, 0); +}); + +test("provider manager clamps selection after removing the selected provider", (t) => { + const modelsJsonPath = createTempModelsJsonPath(); + const rootDir = join(modelsJsonPath, ".."); + t.after(() => rmSync(rootDir, { recursive: true, force: true })); + + const { component } = createComponent({ + modelsJsonPath, + authProviders: ["zeta"], + providers: [ + { name: "alpha", modelIds: ["a-1"] }, + { name: "zeta", modelIds: ["z-1"] }, + ], + }); + + (component as any).selectedIndex = (component as any).providers.findIndex( + (provider: { name: string }) => provider.name === "zeta", + ); + component.handleInput("r"); + + assert.deepEqual(readProviders(modelsJsonPath), ["alpha"]); + assert.ok(!(component as any).providers.some((provider: { name: string }) => provider.name === "zeta")); + assert.ok((component as any).selectedIndex >= 0); + assert.ok((component as any).selectedIndex < (component as any).providers.length); +}); From 905ee092cec5adb5ea7c9c53f40e7011460678a8 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 16:53:51 +0100 Subject: [PATCH 07/27] fix(gsd): enable dynamic routing without models section (#2851) * test(integration): suppress npm pack buffer overflows * fix(gsd): enable dynamic routing without models section --- .../extensions/gsd/auto-model-selection.ts | 22 ++- .../gsd/tests/auto-model-selection.test.ts | 139 ++++++++++++++++++ 2 files changed, 160 insertions(+), 1 deletion(-) create mode 100644 src/resources/extensions/gsd/tests/auto-model-selection.test.ts diff --git a/src/resources/extensions/gsd/auto-model-selection.ts b/src/resources/extensions/gsd/auto-model-selection.ts index 5523854d3..7929f94be 100644 --- a/src/resources/extensions/gsd/auto-model-selection.ts +++ b/src/resources/extensions/gsd/auto-model-selection.ts @@ -18,6 +18,26 @@ export interface ModelSelectionResult { routing: { tier: string; modelDowngraded: boolean } | null; } +export function resolvePreferredModelConfig( + unitType: string, + autoModeStartModel: { provider: string; id: string } | null, +) { + const explicitConfig = resolveModelWithFallbacksForUnit(unitType); + if (explicitConfig) return explicitConfig; + + const routingConfig = resolveDynamicRoutingConfig(); + if (!routingConfig.enabled || !routingConfig.tier_models) return undefined; + + const ceilingModel = routingConfig.tier_models.heavy + ?? (autoModeStartModel ? `${autoModeStartModel.provider}/${autoModeStartModel.id}` : undefined); + if (!ceilingModel) return undefined; + + return { + primary: ceilingModel, + fallbacks: [], + }; +} + /** * Select and apply the appropriate model for a unit dispatch. * Handles: per-unit-type model preferences, dynamic complexity routing, @@ -36,7 +56,7 @@ export async function selectAndApplyModel( autoModeStartModel: { provider: string; id: string } | null, retryContext?: { isRetry: boolean; previousTier?: string }, ): Promise { - const modelConfig = resolveModelWithFallbacksForUnit(unitType); + const modelConfig = resolvePreferredModelConfig(unitType, autoModeStartModel); let routing: { tier: string; modelDowngraded: boolean } | null = null; if (modelConfig) { diff --git a/src/resources/extensions/gsd/tests/auto-model-selection.test.ts b/src/resources/extensions/gsd/tests/auto-model-selection.test.ts new file mode 100644 index 000000000..2bc41fa9e --- /dev/null +++ b/src/resources/extensions/gsd/tests/auto-model-selection.test.ts @@ -0,0 +1,139 @@ +import test from "node:test"; +import assert from "node:assert/strict"; +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +import { resolvePreferredModelConfig } from "../auto-model-selection.js"; + +function makeTempDir(prefix: string): string { + return mkdtempSync(join(tmpdir(), prefix)); +} + +test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models section is absent", () => { + const originalCwd = process.cwd(); + const originalGsdHome = process.env.GSD_HOME; + const tempProject = makeTempDir("gsd-routing-project-"); + const tempGsdHome = makeTempDir("gsd-routing-home-"); + + try { + mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + writeFileSync( + join(tempProject, ".gsd", "PREFERENCES.md"), + [ + "---", + "dynamic_routing:", + " enabled: true", + " tier_models:", + " light: claude-haiku-4-5", + " standard: claude-sonnet-4-6", + " heavy: claude-opus-4-6", + "---", + ].join("\n"), + "utf-8", + ); + process.env.GSD_HOME = tempGsdHome; + process.chdir(tempProject); + + const config = resolvePreferredModelConfig("plan-slice", { + provider: "anthropic", + id: "claude-sonnet-4-6", + }); + + assert.deepEqual(config, { + primary: "claude-opus-4-6", + fallbacks: [], + }); + } finally { + process.chdir(originalCwd); + if (originalGsdHome === undefined) delete process.env.GSD_HOME; + else process.env.GSD_HOME = originalGsdHome; + rmSync(tempProject, { recursive: true, force: true }); + rmSync(tempGsdHome, { recursive: true, force: true }); + } +}); + +test("resolvePreferredModelConfig falls back to auto start model when heavy tier is absent", () => { + const originalCwd = process.cwd(); + const originalGsdHome = process.env.GSD_HOME; + const tempProject = makeTempDir("gsd-routing-project-"); + const tempGsdHome = makeTempDir("gsd-routing-home-"); + + try { + mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + writeFileSync( + join(tempProject, ".gsd", "PREFERENCES.md"), + [ + "---", + "dynamic_routing:", + " enabled: true", + " tier_models:", + " light: claude-haiku-4-5", + " standard: claude-sonnet-4-6", + "---", + ].join("\n"), + "utf-8", + ); + process.env.GSD_HOME = tempGsdHome; + process.chdir(tempProject); + + const config = resolvePreferredModelConfig("execute-task", { + provider: "openai", + id: "gpt-5.4", + }); + + assert.deepEqual(config, { + primary: "openai/gpt-5.4", + fallbacks: [], + }); + } finally { + process.chdir(originalCwd); + if (originalGsdHome === undefined) delete process.env.GSD_HOME; + else process.env.GSD_HOME = originalGsdHome; + rmSync(tempProject, { recursive: true, force: true }); + rmSync(tempGsdHome, { recursive: true, force: true }); + } +}); + +test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", () => { + const originalCwd = process.cwd(); + const originalGsdHome = process.env.GSD_HOME; + const tempProject = makeTempDir("gsd-routing-project-"); + const tempGsdHome = makeTempDir("gsd-routing-home-"); + + try { + mkdirSync(join(tempProject, ".gsd"), { recursive: true }); + writeFileSync( + join(tempProject, ".gsd", "PREFERENCES.md"), + [ + "---", + "models:", + " planning: claude-sonnet-4-6", + "dynamic_routing:", + " enabled: true", + " tier_models:", + " heavy: claude-opus-4-6", + "---", + ].join("\n"), + "utf-8", + ); + process.env.GSD_HOME = tempGsdHome; + process.chdir(tempProject); + + const config = resolvePreferredModelConfig("plan-slice", { + provider: "anthropic", + id: "claude-opus-4-6", + }); + + assert.deepEqual(config, { + primary: "claude-sonnet-4-6", + fallbacks: [], + }); + } finally { + process.chdir(originalCwd); + if (originalGsdHome === undefined) delete process.env.GSD_HOME; + else process.env.GSD_HOME = originalGsdHome; + rmSync(tempProject, { recursive: true, force: true }); + rmSync(tempGsdHome, { recursive: true, force: true }); + } +}); From 50f95d6fa76f3bb3c47f164752c99bf29101fe43 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 16:54:24 +0100 Subject: [PATCH 08/27] test(integration): suppress npm pack buffer overflows (#2843) From 2bc92afa6b73988404df14f21519f7b239de759a Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 16:54:31 +0100 Subject: [PATCH 09/27] fix(bg-shell): recover from deleted cwd in timers (#2850) * test(integration): suppress npm pack buffer overflows * fix(bg-shell): recover from deleted cwd in timers --- .../extensions/bg-shell/bg-shell-lifecycle.ts | 4 +- .../extensions/bg-shell/utilities.ts | 43 ++++++++++++-- .../gsd/bootstrap/register-extension.ts | 25 ++++++-- .../tests/register-extension-guard.test.ts | 59 +++++++++++++++++++ src/tests/bg-shell-persistence-cwd.test.ts | 20 ++++++- 5 files changed, 138 insertions(+), 13 deletions(-) create mode 100644 src/resources/extensions/gsd/tests/register-extension-guard.test.ts diff --git a/src/resources/extensions/bg-shell/bg-shell-lifecycle.ts b/src/resources/extensions/bg-shell/bg-shell-lifecycle.ts index 2f5766595..688db06c4 100644 --- a/src/resources/extensions/bg-shell/bg-shell-lifecycle.ts +++ b/src/resources/extensions/bg-shell/bg-shell-lifecycle.ts @@ -22,7 +22,7 @@ import { loadManifest, pruneDeadProcesses, } from "./process-manager.js"; -import { formatUptime, resolveBgShellPersistenceCwd } from "./utilities.js"; +import { formatUptime, getBgShellLiveCwd, resolveBgShellPersistenceCwd } from "./utilities.js"; import { formatTokenCount } from "../shared/format-utils.js"; import type { BgShellSharedState } from "./index.js"; @@ -213,7 +213,7 @@ export function registerBgShellLifecycle(pi: ExtensionAPI, state: BgShellSharedS return { render(width: number): string[] { // ── Line 1: pwd (branch) [session] ... bg status ── - let pwd = process.cwd(); + let pwd = getBgShellLiveCwd(state.latestCtx?.cwd); const home = process.env.HOME || process.env.USERPROFILE; if (home && pwd.startsWith(home)) { pwd = `~${pwd.slice(home.length)}`; diff --git a/src/resources/extensions/bg-shell/utilities.ts b/src/resources/extensions/bg-shell/utilities.ts index 9b17c130f..05b8fe654 100644 --- a/src/resources/extensions/bg-shell/utilities.ts +++ b/src/resources/extensions/bg-shell/utilities.ts @@ -42,16 +42,51 @@ export function formatTimeAgo(timestamp: number): string { return formatDuration(Date.now() - timestamp) + " ago"; } +function deriveProjectRootFromAutoWorktree(cachedCwd?: string): string | undefined { + if (!cachedCwd) return undefined; + const match = cachedCwd.match(/^(.*?)[\\/]\.gsd[\\/]worktrees[\\/][^\\/]+(?:[\\/].*)?$/); + return match?.[1]; +} + +export function getBgShellLiveCwd( + cachedCwd?: string, + pathExists: (path: string) => boolean = existsSync, + getCwd: () => string = () => process.cwd(), + chdir: (path: string) => void = (path) => process.chdir(path), +): string { + try { + return getCwd(); + } catch { + const projectRoot = deriveProjectRootFromAutoWorktree(cachedCwd); + const home = process.env.HOME || process.env.USERPROFILE; + const fallbacks = [projectRoot, cachedCwd, home, "/"].filter( + (candidate): candidate is string => Boolean(candidate), + ); + + for (const candidate of fallbacks) { + if (candidate !== "/" && !pathExists(candidate)) continue; + try { + chdir(candidate); + } catch { + // Best-effort only. Returning a known-good fallback is enough to avoid crashes. + } + return candidate; + } + + return "/"; + } +} export function resolveBgShellPersistenceCwd( cachedCwd: string, - liveCwd = process.cwd(), + liveCwd: string | undefined = undefined, pathExists: (path: string) => boolean = existsSync, ): string { + const resolvedLiveCwd = liveCwd ?? getBgShellLiveCwd(cachedCwd, pathExists); const cachedIsAutoWorktree = /(?:^|[\\/])\.gsd[\\/]worktrees[\\/]/.test(cachedCwd); if (!cachedIsAutoWorktree) return cachedCwd; - if (cachedCwd === liveCwd && pathExists(cachedCwd)) return cachedCwd; - if (!pathExists(cachedCwd)) return liveCwd; - if (liveCwd !== cachedCwd) return liveCwd; + if (cachedCwd === resolvedLiveCwd && pathExists(cachedCwd)) return cachedCwd; + if (!pathExists(cachedCwd)) return resolvedLiveCwd; + if (resolvedLiveCwd !== cachedCwd) return resolvedLiveCwd; return cachedCwd; } diff --git a/src/resources/extensions/gsd/bootstrap/register-extension.ts b/src/resources/extensions/gsd/bootstrap/register-extension.ts index 166d227ad..1e1b62f5a 100644 --- a/src/resources/extensions/gsd/bootstrap/register-extension.ts +++ b/src/resources/extensions/gsd/bootstrap/register-extension.ts @@ -9,14 +9,28 @@ import { registerJournalTools } from "./journal-tools.js"; import { registerHooks } from "./register-hooks.js"; import { registerShortcuts } from "./register-shortcuts.js"; +export function handleRecoverableExtensionProcessError(err: Error): boolean { + if ((err as NodeJS.ErrnoException).code === "EPIPE") { + process.exit(0); + } + if ((err as NodeJS.ErrnoException).code === "ENOENT") { + const syscall = (err as NodeJS.ErrnoException).syscall; + if (syscall?.startsWith("spawn")) { + process.stderr.write(`[gsd] spawn ENOENT: ${(err as any).path ?? "unknown"} — command not found\n`); + return true; + } + if (syscall === "uv_cwd") { + process.stderr.write(`[gsd] ENOENT (${syscall}): ${err.message}\n`); + return true; + } + } + return false; +} + function installEpipeGuard(): void { if (!process.listeners("uncaughtException").some((listener) => listener.name === "_gsdEpipeGuard")) { const _gsdEpipeGuard = (err: Error): void => { - if ((err as NodeJS.ErrnoException).code === "EPIPE") { - process.exit(0); - } - if ((err as NodeJS.ErrnoException).code === "ENOENT" && (err as any).syscall?.startsWith("spawn")) { - process.stderr.write(`[gsd] spawn ENOENT: ${(err as any).path ?? "unknown"} — command not found\n`); + if (handleRecoverableExtensionProcessError(err)) { return; } throw err; @@ -45,4 +59,3 @@ export function registerGsdExtension(pi: ExtensionAPI): void { registerShortcuts(pi); registerHooks(pi); } - diff --git a/src/resources/extensions/gsd/tests/register-extension-guard.test.ts b/src/resources/extensions/gsd/tests/register-extension-guard.test.ts new file mode 100644 index 000000000..9d926b852 --- /dev/null +++ b/src/resources/extensions/gsd/tests/register-extension-guard.test.ts @@ -0,0 +1,59 @@ +import test from "node:test"; +import assert from "node:assert/strict"; + +import { handleRecoverableExtensionProcessError } from "../bootstrap/register-extension.ts"; + +test("handleRecoverableExtensionProcessError swallows spawn ENOENT", () => { + let stderr = ""; + const originalWrite = process.stderr.write.bind(process.stderr); + process.stderr.write = ((chunk: string | Uint8Array) => { + stderr += String(chunk); + return true; + }) as typeof process.stderr.write; + + try { + const handled = handleRecoverableExtensionProcessError( + Object.assign(new Error("missing binary"), { + code: "ENOENT", + syscall: "spawn npm", + path: "npm", + }), + ); + assert.equal(handled, true); + assert.match(stderr, /spawn ENOENT: npm/); + } finally { + process.stderr.write = originalWrite; + } +}); + +test("handleRecoverableExtensionProcessError swallows uv_cwd ENOENT", () => { + let stderr = ""; + const originalWrite = process.stderr.write.bind(process.stderr); + process.stderr.write = ((chunk: string | Uint8Array) => { + stderr += String(chunk); + return true; + }) as typeof process.stderr.write; + + try { + const handled = handleRecoverableExtensionProcessError( + Object.assign(new Error("process.cwd failed"), { + code: "ENOENT", + syscall: "uv_cwd", + }), + ); + assert.equal(handled, true); + assert.match(stderr, /ENOENT \(uv_cwd\): process\.cwd failed/); + } finally { + process.stderr.write = originalWrite; + } +}); + +test("handleRecoverableExtensionProcessError leaves unrelated errors unhandled", () => { + const handled = handleRecoverableExtensionProcessError( + Object.assign(new Error("permission denied"), { + code: "EPERM", + syscall: "open", + }), + ); + assert.equal(handled, false); +}); diff --git a/src/tests/bg-shell-persistence-cwd.test.ts b/src/tests/bg-shell-persistence-cwd.test.ts index f1277b1e7..15e63f8e5 100644 --- a/src/tests/bg-shell-persistence-cwd.test.ts +++ b/src/tests/bg-shell-persistence-cwd.test.ts @@ -1,7 +1,10 @@ import test from "node:test"; import assert from "node:assert/strict"; -import { resolveBgShellPersistenceCwd } from "../resources/extensions/bg-shell/utilities.ts"; +import { + getBgShellLiveCwd, + resolveBgShellPersistenceCwd, +} from "../resources/extensions/bg-shell/utilities.ts"; test("keeps non-worktree cwd unchanged", () => { const cached = "/repo"; @@ -43,3 +46,18 @@ test("keeps current auto-worktree cwd when it still matches process cwd", () => cached, ); }); + +test("falls back to project root when process.cwd throws inside a stale auto-worktree", () => { + const cached = "/repo/.gsd/worktrees/M001"; + const live = getBgShellLiveCwd( + cached, + (path) => path === "/repo", + () => { + throw Object.assign(new Error("uv_cwd"), { code: "ENOENT", syscall: "uv_cwd" }); + }, + () => {}, + ); + + assert.equal(live, "/repo"); + assert.equal(resolveBgShellPersistenceCwd(cached, live, (path) => path === "/repo"), "/repo"); +}); From 67f78a73142652ccdc81ed8ba95527a9419fc090 Mon Sep 17 00:00:00 2001 From: Jean-Dominique Stepek Date: Fri, 27 Mar 2026 11:55:00 -0400 Subject: [PATCH 10/27] fix: detect monorepo roots in project discovery to prevent workspace fragmentation (#2849) When devRoot pointed at a monorepo, discoverProjects scanned one level deep and listed each workspace/package as a separate project. Now it checks for monorepo markers (pnpm-workspace.yaml, lerna.json, turbo.json, nx.json, rush.json, package.json workspaces) before scanning children. If the root is a monorepo, it returns it as a single project entry. - Add detectMonorepo() to bridge-service with support for 6 monorepo formats - Add isMonorepo signal to ProjectDetectionSignals - Update discoverProjects to short-circuit when root is a monorepo - Show 'Monorepo' tag in project list UI - Add 24 tests covering all monorepo detection scenarios --- .../web-project-discovery-contract.test.ts | 151 +++++++++++++++++- src/web/bridge-service.ts | 44 +++++ src/web/project-discovery-service.ts | 28 +++- .../gsd/onboarding/step-project.tsx | 2 + web/components/gsd/projects-view.tsx | 2 + web/lib/gsd-workspace-store.tsx | 1 + 6 files changed, 223 insertions(+), 5 deletions(-) diff --git a/src/tests/web-project-discovery-contract.test.ts b/src/tests/web-project-discovery-contract.test.ts index 351a75426..cd2c52fdd 100644 --- a/src/tests/web-project-discovery-contract.test.ts +++ b/src/tests/web-project-discovery-contract.test.ts @@ -2,12 +2,13 @@ import test, { after, describe } from "node:test"; import assert from "node:assert/strict"; import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; import { tmpdir } from "node:os"; -import { join } from "node:path"; +import { basename, join } from "node:path"; import { discoverProjects } from "../web/project-discovery-service.ts"; +import { detectMonorepo } from "../web/bridge-service.ts"; // --------------------------------------------------------------------------- -// Fixture setup +// Fixture setup — standard multi-project root // --------------------------------------------------------------------------- const tempRoot = mkdtempSync(join(tmpdir(), "gsd-project-discovery-")); @@ -38,16 +39,73 @@ mkdirSync(join(tempRoot, ".hidden")); // node_modules: should be excluded mkdirSync(join(tempRoot, "node_modules")); +// --------------------------------------------------------------------------- +// Fixture setup — monorepo roots +// --------------------------------------------------------------------------- + +// monorepo-pnpm: detected via pnpm-workspace.yaml +const monorepoPnpm = mkdtempSync(join(tmpdir(), "gsd-mono-pnpm-")); +mkdirSync(join(monorepoPnpm, ".git")); +writeFileSync(join(monorepoPnpm, "package.json"), '{"name":"my-monorepo"}'); +writeFileSync(join(monorepoPnpm, "pnpm-workspace.yaml"), 'packages:\n - "packages/*"'); +mkdirSync(join(monorepoPnpm, "packages")); +mkdirSync(join(monorepoPnpm, "packages", "pkg-a")); +mkdirSync(join(monorepoPnpm, "packages", "pkg-b")); + +// monorepo-lerna: detected via lerna.json +const monorepoLerna = mkdtempSync(join(tmpdir(), "gsd-mono-lerna-")); +mkdirSync(join(monorepoLerna, ".git")); +writeFileSync(join(monorepoLerna, "package.json"), '{"name":"lerna-mono"}'); +writeFileSync(join(monorepoLerna, "lerna.json"), '{"version":"1.0.0"}'); +mkdirSync(join(monorepoLerna, "backend")); +mkdirSync(join(monorepoLerna, "frontend")); + +// monorepo-workspaces: detected via package.json workspaces field +const monorepoWorkspaces = mkdtempSync(join(tmpdir(), "gsd-mono-ws-")); +mkdirSync(join(monorepoWorkspaces, ".git")); +writeFileSync(join(monorepoWorkspaces, "package.json"), '{"name":"ws-mono","workspaces":["packages/*"]}'); +mkdirSync(join(monorepoWorkspaces, "packages")); +mkdirSync(join(monorepoWorkspaces, "packages", "core")); +mkdirSync(join(monorepoWorkspaces, "packages", "ui")); + +// monorepo-turbo: detected via turbo.json +const monorepoTurbo = mkdtempSync(join(tmpdir(), "gsd-mono-turbo-")); +mkdirSync(join(monorepoTurbo, ".git")); +writeFileSync(join(monorepoTurbo, "package.json"), '{"name":"turbo-mono"}'); +writeFileSync(join(monorepoTurbo, "turbo.json"), '{"pipeline":{}}'); +mkdirSync(join(monorepoTurbo, "apps")); +mkdirSync(join(monorepoTurbo, "packages")); + +// monorepo-nx: detected via nx.json +const monorepoNx = mkdtempSync(join(tmpdir(), "gsd-mono-nx-")); +mkdirSync(join(monorepoNx, ".git")); +writeFileSync(join(monorepoNx, "package.json"), '{"name":"nx-mono"}'); +writeFileSync(join(monorepoNx, "nx.json"), '{}'); +mkdirSync(join(monorepoNx, "libs")); +mkdirSync(join(monorepoNx, "apps")); + +// non-monorepo: plain project with package.json (no workspaces, no marker files) +const plainProject = mkdtempSync(join(tmpdir(), "gsd-plain-project-")); +mkdirSync(join(plainProject, ".git")); +writeFileSync(join(plainProject, "package.json"), '{"name":"plain","dependencies":{}}'); +mkdirSync(join(plainProject, "src")); + // --------------------------------------------------------------------------- // Teardown // --------------------------------------------------------------------------- after(() => { rmSync(tempRoot, { recursive: true, force: true }); + rmSync(monorepoPnpm, { recursive: true, force: true }); + rmSync(monorepoLerna, { recursive: true, force: true }); + rmSync(monorepoWorkspaces, { recursive: true, force: true }); + rmSync(monorepoTurbo, { recursive: true, force: true }); + rmSync(monorepoNx, { recursive: true, force: true }); + rmSync(plainProject, { recursive: true, force: true }); }); // --------------------------------------------------------------------------- -// Tests +// Tests — standard multi-project root // --------------------------------------------------------------------------- describe("project-discovery", () => { @@ -122,3 +180,90 @@ describe("project-discovery", () => { assert.deepStrictEqual(results, []); }); }); + +// --------------------------------------------------------------------------- +// Tests — monorepo detection +// --------------------------------------------------------------------------- + +describe("detectMonorepo", () => { + test("detects pnpm-workspace.yaml", () => { + assert.ok(detectMonorepo(monorepoPnpm)); + }); + + test("detects lerna.json", () => { + assert.ok(detectMonorepo(monorepoLerna)); + }); + + test("detects package.json with workspaces field", () => { + assert.ok(detectMonorepo(monorepoWorkspaces)); + }); + + test("detects turbo.json", () => { + assert.ok(detectMonorepo(monorepoTurbo)); + }); + + test("detects nx.json", () => { + assert.ok(detectMonorepo(monorepoNx)); + }); + + test("does not detect plain project as monorepo", () => { + assert.ok(!detectMonorepo(plainProject)); + }); + + test("does not detect empty directory as monorepo", () => { + assert.ok(!detectMonorepo(tempRoot)); + }); +}); + +// --------------------------------------------------------------------------- +// Tests — monorepo root as devRoot returns single entry +// --------------------------------------------------------------------------- + +describe("project-discovery with monorepo root as devRoot", () => { + test("pnpm monorepo root returns single project entry", () => { + const results = discoverProjects(monorepoPnpm); + assert.equal(results.length, 1, `Expected 1 project, got ${results.length}: ${results.map(r => r.name).join(", ")}`); + assert.equal(results[0].path, monorepoPnpm); + assert.equal(results[0].name, basename(monorepoPnpm)); + assert.equal(results[0].signals.isMonorepo, true); + }); + + test("lerna monorepo root returns single project entry", () => { + const results = discoverProjects(monorepoLerna); + assert.equal(results.length, 1); + assert.equal(results[0].path, monorepoLerna); + assert.equal(results[0].signals.isMonorepo, true); + }); + + test("npm/yarn workspaces monorepo root returns single project entry", () => { + const results = discoverProjects(monorepoWorkspaces); + assert.equal(results.length, 1); + assert.equal(results[0].path, monorepoWorkspaces); + assert.equal(results[0].signals.isMonorepo, true); + }); + + test("turbo monorepo root returns single project entry", () => { + const results = discoverProjects(monorepoTurbo); + assert.equal(results.length, 1); + assert.equal(results[0].path, monorepoTurbo); + }); + + test("nx monorepo root returns single project entry", () => { + const results = discoverProjects(monorepoNx); + assert.equal(results.length, 1); + assert.equal(results[0].path, monorepoNx); + }); + + test("plain project (not monorepo) scans children normally", () => { + // plainProject has .git, package.json, src/ — not a monorepo + // Should scan children: just "src" + const results = discoverProjects(plainProject); + assert.ok(results.length >= 1, "should scan children for non-monorepo"); + assert.ok(results.some(r => r.name === "src"), "should find src directory"); + }); + + test("monorepo entry has correct kind (brownfield when no .gsd)", () => { + const results = discoverProjects(monorepoPnpm); + assert.equal(results[0].kind, "brownfield"); + }); +}); diff --git a/src/web/bridge-service.ts b/src/web/bridge-service.ts index c355086e8..f1faac3aa 100644 --- a/src/web/bridge-service.ts +++ b/src/web/bridge-service.ts @@ -526,6 +526,8 @@ export interface ProjectDetectionSignals { hasCargo?: boolean; hasGoMod?: boolean; hasPyproject?: boolean; + /** True when the directory looks like a monorepo root (workspaces, lerna, pnpm-workspace, etc.) */ + isMonorepo?: boolean; fileCount: number; } @@ -534,6 +536,46 @@ export interface ProjectDetection { signals: ProjectDetectionSignals; } +/** + * Detect whether a directory looks like a monorepo root. + * + * Checks for common monorepo indicators: + * - `pnpm-workspace.yaml` (pnpm workspaces) + * - `lerna.json` (Lerna) + * - `package.json` with a `workspaces` field (npm/yarn workspaces) + * - `rush.json` (Rush) + * - `nx.json` (Nx) + * - `turbo.json` (Turborepo) + * + * This is intentionally cheap — file existence checks only, with a single + * JSON parse for `package.json` workspaces (which we're already reading + * in many code paths). No deep directory scanning. + */ +export function detectMonorepo(dirPath: string, checkExists?: (path: string) => boolean): boolean { + const exists = checkExists ?? (getBridgeDeps().existsSync ?? existsSync); + + // Fast checks — file existence only + if (exists(join(dirPath, "pnpm-workspace.yaml"))) return true; + if (exists(join(dirPath, "lerna.json"))) return true; + if (exists(join(dirPath, "rush.json"))) return true; + if (exists(join(dirPath, "nx.json"))) return true; + if (exists(join(dirPath, "turbo.json"))) return true; + + // Check package.json for workspaces field (npm/yarn workspaces) + const packageJsonPath = join(dirPath, "package.json"); + if (exists(packageJsonPath)) { + try { + const raw = readFileSync(packageJsonPath, "utf-8"); + const pkg = JSON.parse(raw) as Record; + if (pkg.workspaces != null) return true; + } catch { + // Malformed JSON or unreadable — not a monorepo indicator + } + } + + return false; +} + export function detectProjectKind(projectCwd: string): ProjectDetection { const checkExists = getBridgeDeps().existsSync ?? existsSync; @@ -544,6 +586,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { const hasCargo = checkExists(join(projectCwd, "Cargo.toml")); const hasGoMod = checkExists(join(projectCwd, "go.mod")); const hasPyproject = checkExists(join(projectCwd, "pyproject.toml")); + const isMonorepo = detectMonorepo(projectCwd, checkExists); // Count top-level non-dot entries (cheap heuristic for "has code") let fileCount = 0; @@ -562,6 +605,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { hasCargo, hasGoMod, hasPyproject, + isMonorepo, fileCount, }; diff --git a/src/web/project-discovery-service.ts b/src/web/project-discovery-service.ts index c2b450e6c..86c468de4 100644 --- a/src/web/project-discovery-service.ts +++ b/src/web/project-discovery-service.ts @@ -1,7 +1,7 @@ import { readdirSync, readFileSync, statSync } from "node:fs"; -import { join } from "node:path"; +import { basename, join } from "node:path"; import type { ProjectDetectionKind, ProjectDetectionSignals } from "./bridge-service.ts"; -import { detectProjectKind } from "./bridge-service.ts"; +import { detectMonorepo, detectProjectKind } from "./bridge-service.ts"; // ─── Project Discovery ───────────────────────────────────────────────────── @@ -72,11 +72,35 @@ export function readProjectProgress(projectPath: string): ProjectProgressInfo | * discovered project directory. Hidden dirs (starting with `.`), `node_modules`, * and `.git` are excluded. * + * **Monorepo detection:** If `devRootPath` itself looks like a project root + * (has `.git`, `package.json`, monorepo markers like `pnpm-workspace.yaml` / + * `lerna.json` / `workspaces` in `package.json`), it is returned as a single + * project entry instead of scanning its children. This prevents monorepo + * subdirectories from being listed as independent projects. + * * Returns an empty array if `devRootPath` doesn't exist or isn't readable. * Results are sorted alphabetically by name. */ export function discoverProjects(devRootPath: string, includeProgress?: boolean): ProjectMetadata[] { try { + // ── Check if the root itself is a project/monorepo ────────────── + // If the devRoot has a .git repo AND looks like a monorepo (pnpm-workspace, + // lerna, workspaces, etc.) or looks like a standalone project root (has + // .gsd, or is a recognizable project), return it as a single entry. + const rootDetection = detectProjectKind(devRootPath); + if (rootDetection.signals.isMonorepo) { + const stat = statSync(devRootPath); + return [{ + name: basename(devRootPath), + path: devRootPath, + kind: rootDetection.kind, + signals: rootDetection.signals, + lastModified: stat.mtimeMs, + ...(includeProgress ? { progress: readProjectProgress(devRootPath) } : {}), + }]; + } + + // ── Standard multi-project scan ───────────────────────────────── const entries = readdirSync(devRootPath, { withFileTypes: true }); const projects: ProjectMetadata[] = []; diff --git a/web/components/gsd/onboarding/step-project.tsx b/web/components/gsd/onboarding/step-project.tsx index 6b783c2b5..6eeba3696 100644 --- a/web/components/gsd/onboarding/step-project.tsx +++ b/web/components/gsd/onboarding/step-project.tsx @@ -33,6 +33,7 @@ interface ProjectDetectionSignals { hasCargo?: boolean hasGoMod?: boolean hasPyproject?: boolean + isMonorepo?: boolean } interface ProjectProgressInfo { @@ -64,6 +65,7 @@ const KIND_STYLE: Record Date: Fri, 27 Mar 2026 16:55:19 +0100 Subject: [PATCH 11/27] fix(gsd): include queue context in milestone planning prompts (#2846) * test(integration): suppress npm pack buffer overflows * fix(gsd): include queue context in milestone planning prompts --- src/resources/extensions/gsd/auto-prompts.ts | 15 ++++++ .../plan-milestone-queue-context.test.ts | 48 +++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 src/resources/extensions/gsd/tests/plan-milestone-queue-context.test.ts diff --git a/src/resources/extensions/gsd/auto-prompts.ts b/src/resources/extensions/gsd/auto-prompts.ts index 5e1984c56..1ea0e3366 100644 --- a/src/resources/extensions/gsd/auto-prompts.ts +++ b/src/resources/extensions/gsd/auto-prompts.ts @@ -87,6 +87,11 @@ function buildSourceFilePaths( paths.push(`- **Decisions**: \`${relGsdRootFile("DECISIONS")}\``); } + const queuePath = resolveGsdRootFile(base, "QUEUE"); + if (existsSync(queuePath)) { + paths.push(`- **Queue**: \`${relGsdRootFile("QUEUE")}\``); + } + const contextPath = resolveMilestoneFile(base, mid, "CONTEXT"); if (contextPath) { paths.push(`- **Milestone Context**: \`${relMilestoneFile(base, mid, "CONTEXT")}\``); @@ -915,6 +920,16 @@ export async function buildPlanMilestonePrompt(mid: string, midTitle: string, ba const decisionsInline = await inlineDecisionsFromDb(base, mid, undefined, inlineLevel); if (decisionsInline) inlined.push(decisionsInline); } + const queuePath = resolveGsdRootFile(base, "QUEUE"); + if (existsSync(queuePath)) { + const queueInline = await inlineFileSmart( + queuePath, + relGsdRootFile("QUEUE"), + "Project Queue", + `${mid} ${midTitle}`, + ); + inlined.push(queueInline); + } const knowledgeInlinePM = await inlineGsdRootFile(base, "knowledge.md", "Project Knowledge"); if (knowledgeInlinePM) inlined.push(knowledgeInlinePM); inlined.push(inlineTemplate("roadmap", "Roadmap")); diff --git a/src/resources/extensions/gsd/tests/plan-milestone-queue-context.test.ts b/src/resources/extensions/gsd/tests/plan-milestone-queue-context.test.ts new file mode 100644 index 000000000..83a2f955d --- /dev/null +++ b/src/resources/extensions/gsd/tests/plan-milestone-queue-context.test.ts @@ -0,0 +1,48 @@ +import { describe, test } from "node:test"; +import assert from "node:assert/strict"; +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +import { buildPlanMilestonePrompt } from "../auto-prompts.ts"; + +function createBase(): string { + const base = mkdtempSync(join(tmpdir(), "gsd-plan-queue-")); + mkdirSync(join(base, ".gsd", "milestones", "M010"), { recursive: true }); + return base; +} + +function cleanup(base: string): void { + rmSync(base, { recursive: true, force: true }); +} + +describe("plan-milestone queue context", () => { + test("includes queue brief when planning milestone without roadmap context", async () => { + const base = createBase(); + try { + writeFileSync( + join(base, ".gsd", "QUEUE.md"), + [ + "# Queue", + "", + "### M010: Analytics Dashboard — Interactivity, Intelligence & Demo Readiness", + "**Vision:** Ship a polished analytics dashboard with drilldowns and AI assistance.", + "", + "## Scope", + "- Interactivity", + "- Intelligence", + "- Demo readiness", + "", + ].join("\n"), + ); + + const prompt = await buildPlanMilestonePrompt("M010", "M010", base); + + assert.match(prompt, /Source: `\.gsd\/QUEUE\.md`/); + assert.match(prompt, /Analytics Dashboard — Interactivity, Intelligence & Demo Readiness/); + assert.match(prompt, /Ship a polished analytics dashboard/); + } finally { + cleanup(base); + } + }); +}); From 36930694e4952cdbd3a74e4d424271d9f59f8abf Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 18:29:03 +0100 Subject: [PATCH 12/27] fix(gsd): use project root for prior-slice dispatch guard (#2863) Resolve the prior-slice completion guard against originalBasePath when auto-mode is running in a worktree. This keeps completed upstream milestones from blocking new dispatches because their SUMMARY state lives at the project root, not the stale worktree snapshot. Closes #2838 Co-authored-by: Paperclip --- src/resources/extensions/gsd/auto/phases.ts | 16 +++++- .../gsd/tests/journal-integration.test.ts | 55 +++++++++++++++++++ 2 files changed, 69 insertions(+), 2 deletions(-) diff --git a/src/resources/extensions/gsd/auto/phases.ts b/src/resources/extensions/gsd/auto/phases.ts index 966247a5e..6269bfc0d 100644 --- a/src/resources/extensions/gsd/auto/phases.ts +++ b/src/resources/extensions/gsd/auto/phases.ts @@ -45,6 +45,17 @@ export function _resolveReportBasePath(s: Pick, +): string { + return s.originalBasePath || s.basePath; +} + /** * Generate and write an HTML milestone report snapshot. * Extracted from the milestone-transition block in autoLoop. @@ -667,9 +678,10 @@ export async function runDispatch( prompt = preDispatchResult.prompt; } + const guardBasePath = _resolveDispatchGuardBasePath(s); const priorSliceBlocker = deps.getPriorSliceCompletionBlocker( - s.basePath, - deps.getMainBranch(s.basePath), + guardBasePath, + deps.getMainBranch(guardBasePath), unitType, unitId, ); diff --git a/src/resources/extensions/gsd/tests/journal-integration.test.ts b/src/resources/extensions/gsd/tests/journal-integration.test.ts index 49f64d7a3..8447019ce 100644 --- a/src/resources/extensions/gsd/tests/journal-integration.test.ts +++ b/src/resources/extensions/gsd/tests/journal-integration.test.ts @@ -260,6 +260,61 @@ test("runDispatch emits dispatch-stop when dispatch returns stop action", async assert.equal(stopEvents[0].flowId, ic.flowId); }); +test("runDispatch checks prior-slice completion against the project root in worktree mode", async () => { + const capture = createEventCapture(); + const guardCalls: Array<{ fn: string; args: unknown[] }> = []; + const deps = makeMockDeps(capture, { + getMainBranch: (basePath: string) => { + guardCalls.push({ fn: "getMainBranch", args: [basePath] }); + return "main"; + }, + getPriorSliceCompletionBlocker: ( + basePath: string, + mainBranch: string, + unitType: string, + unitId: string, + ) => { + guardCalls.push({ + fn: "getPriorSliceCompletionBlocker", + args: [basePath, mainBranch, unitType, unitId], + }); + return null; + }, + }); + const ic = makeIC(deps, { + s: { + ...makeSession(), + basePath: "/tmp/project/.gsd/worktrees/M029-xoklo9", + originalBasePath: "/tmp/project", + } as any, + }); + const preData: PreDispatchData = { + state: { + phase: "executing", + activeMilestone: { id: "M029-xoklo9", title: "Test", status: "active" }, + activeSlice: { id: "S01", title: "Slice 1" }, + registry: [{ id: "M029-xoklo9", status: "active" }], + blockers: [], + } as any, + mid: "M029-xoklo9", + midTitle: "Test Milestone", + }; + + const result = await runDispatch(ic, preData, { + recentUnits: [], + stuckRecoveryAttempts: 0, + }); + + assert.equal(result.action, "next"); + assert.deepEqual(guardCalls, [ + { fn: "getMainBranch", args: ["/tmp/project"] }, + { + fn: "getPriorSliceCompletionBlocker", + args: ["/tmp/project", "main", "execute-task", "M001/S01/T01"], + }, + ]); +}); + test("runUnitPhase emits unit-start and unit-end with causedBy reference", async () => { const capture = createEventCapture(); From 1d5590c19a557e223cc25f5e436864b31c4441ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?T=C3=82CHES?= Date: Fri, 27 Mar 2026 12:13:17 -0600 Subject: [PATCH 13/27] feat: headless text mode observability + skip UAT pause (#2867) * feat: headless text mode shows tool calls + skip UAT pause in headless Text mode observability: - Tool calls always visible with summarized args (path, command, pattern) - Tool errors surfaced even in non-verbose mode - Cost updates shown periodically - Empty [status] lines suppressed (setStatus/setWidget are TUI-only) - Empty notify messages suppressed UAT pause skip: - Set GSD_HEADLESS=1 env var when spawning RPC child process - auto-dispatch checks GSD_HEADLESS and skips pauseAfterDispatch for UAT - Headless runs no longer stall waiting for human UAT verification * test: add formatProgress unit tests for headless text mode 16 tests covering tool call display, arg summarization, cost formatting, empty status suppression, and notify filtering. * ci: retrigger --- src/headless-ui.ts | 76 ++++++++- src/headless.ts | 2 + src/resources/extensions/gsd/auto-dispatch.ts | 2 +- src/tests/headless-progress.test.ts | 148 ++++++++++++++++++ 4 files changed, 221 insertions(+), 7 deletions(-) create mode 100644 src/tests/headless-progress.test.ts diff --git a/src/headless-ui.ts b/src/headless-ui.ts index 7beea6bef..e99d94755 100644 --- a/src/headless-ui.ts +++ b/src/headless-ui.ts @@ -82,9 +82,37 @@ export function formatProgress(event: Record, verbose: boolean) const type = String(event.type ?? '') switch (type) { - case 'tool_execution_start': - if (verbose) return ` [tool] ${event.toolName ?? 'unknown'}` + case 'tool_execution_start': { + const name = String(event.toolName ?? 'unknown') + const summary = summarizeToolArgs(name, event.args as Record | undefined) + return summary ? ` [tool] ${name} ${summary}` : ` [tool] ${name}` + } + + case 'tool_execution_end': { + if (verbose) { + const name = String(event.toolName ?? 'unknown') + const isError = Boolean(event.isError) + return isError ? ` [tool] ${name} ✗ error` : null + } + // In non-verbose, only surface errors + if (event.isError) { + const name = String(event.toolName ?? 'unknown') + return ` [tool] ${name} ✗ error` + } return null + } + + case 'cost_update': { + const cumCost = event.cumulativeCost as Record | undefined + const costUsd = Number(cumCost?.costUsd ?? 0) + if (costUsd > 0) { + const tokens = event.tokens as Record | undefined + const inK = tokens ? (tokens.input / 1000).toFixed(1) : '?' + const outK = tokens ? (tokens.output / 1000).toFixed(1) : '?' + return ` [cost] $${costUsd.toFixed(4)} (${inK}k in / ${outK}k out)` + } + return null + } case 'agent_start': return '[agent] Session started' @@ -94,11 +122,10 @@ export function formatProgress(event: Record, verbose: boolean) case 'extension_ui_request': if (event.method === 'notify') { - return `[gsd] ${event.message ?? ''}` - } - if (event.method === 'setStatus') { - return `[status] ${event.message ?? ''}` + const msg = String(event.message ?? '') + return msg ? `[gsd] ${msg}` : null } + // setStatus / setWidget are TUI-specific — suppress in text mode return null default: @@ -106,6 +133,43 @@ export function formatProgress(event: Record, verbose: boolean) } } +/** + * Extract a short summary from tool arguments for display. + * Returns null if nothing useful can be summarized. + */ +function summarizeToolArgs(toolName: string, args: Record | undefined): string | null { + if (!args) return null + + switch (toolName) { + case 'Read': + case 'read': + return args.path ? String(args.path) : null + case 'Write': + case 'write': + return args.path ? String(args.path) : null + case 'Edit': + case 'edit': + return args.path ? String(args.path) : null + case 'Bash': + case 'bash': { + const cmd = String(args.command ?? '') + return cmd.length > 80 ? cmd.slice(0, 77) + '...' : cmd || null + } + case 'Grep': + case 'grep': + return args.pattern ? `/${args.pattern}/` + (args.path ? ` in ${args.path}` : '') : null + case 'find': + return args.pattern ? String(args.pattern) + (args.path ? ` in ${args.path}` : '') : null + case 'lsp': + return args.action ? String(args.action) + (args.symbol ? ` ${args.symbol}` : '') : null + default: { + // For GSD tools, show the first string arg that looks like an ID or path + const first = Object.values(args).find(v => typeof v === 'string' && String(v).length < 80) + return first ? String(first) : null + } + } +} + // --------------------------------------------------------------------------- // Supervised Stdin Reader // --------------------------------------------------------------------------- diff --git a/src/headless.ts b/src/headless.ts index 4fe480501..5e54cac64 100644 --- a/src/headless.ts +++ b/src/headless.ts @@ -344,6 +344,8 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): if (injector) { clientOptions.env = injector.getSecretEnvVars() } + // Signal headless mode to the GSD extension (skips UAT human pause, etc.) + clientOptions.env = { ...(clientOptions.env as Record || {}), GSD_HEADLESS: '1' } // Propagate --bare to the child process if (options.bare) { clientOptions.args = [...((clientOptions.args as string[]) || []), '--bare'] diff --git a/src/resources/extensions/gsd/auto-dispatch.ts b/src/resources/extensions/gsd/auto-dispatch.ts index d6ba2424f..59fb2ac19 100644 --- a/src/resources/extensions/gsd/auto-dispatch.ts +++ b/src/resources/extensions/gsd/auto-dispatch.ts @@ -200,7 +200,7 @@ export const DISPATCH_RULES: DispatchRule[] = [ uatContent ?? "", basePath, ), - pauseAfterDispatch: uatType !== "artifact-driven" && uatType !== "browser-executable" && uatType !== "runtime-executable", + pauseAfterDispatch: !process.env.GSD_HEADLESS && uatType !== "artifact-driven" && uatType !== "browser-executable" && uatType !== "runtime-executable", }; }, }, diff --git a/src/tests/headless-progress.test.ts b/src/tests/headless-progress.test.ts new file mode 100644 index 000000000..0e55cbafa --- /dev/null +++ b/src/tests/headless-progress.test.ts @@ -0,0 +1,148 @@ +import { describe, it } from 'node:test' +import assert from 'node:assert/strict' +import { formatProgress } from '../headless-ui.js' + +describe('formatProgress', () => { + describe('tool_execution_start', () => { + it('shows tool name and summarized args', () => { + const result = formatProgress({ + type: 'tool_execution_start', + toolName: 'bash', + args: { command: 'npm run build' }, + }, false) + assert.equal(result, ' [tool] bash npm run build') + }) + + it('shows Read with file path', () => { + const result = formatProgress({ + type: 'tool_execution_start', + toolName: 'Read', + args: { path: 'src/main.ts' }, + }, false) + assert.equal(result, ' [tool] Read src/main.ts') + }) + + it('shows grep with pattern and path', () => { + const result = formatProgress({ + type: 'tool_execution_start', + toolName: 'grep', + args: { pattern: 'TODO', path: 'src/' }, + }, false) + assert.equal(result, ' [tool] grep /TODO/ in src/') + }) + + it('truncates long bash commands', () => { + const longCmd = 'a'.repeat(100) + const result = formatProgress({ + type: 'tool_execution_start', + toolName: 'bash', + args: { command: longCmd }, + }, false) + assert.ok(result!.endsWith('...')) + assert.ok(result!.length < 100) + }) + + it('shows tool name alone when no args', () => { + const result = formatProgress({ + type: 'tool_execution_start', + toolName: 'unknown_tool', + }, false) + assert.equal(result, ' [tool] unknown_tool') + }) + }) + + describe('tool_execution_end', () => { + it('shows error in non-verbose mode', () => { + const result = formatProgress({ + type: 'tool_execution_end', + toolName: 'bash', + isError: true, + }, false) + assert.equal(result, ' [tool] bash ✗ error') + }) + + it('suppresses success in non-verbose mode', () => { + const result = formatProgress({ + type: 'tool_execution_end', + toolName: 'bash', + isError: false, + }, false) + assert.equal(result, null) + }) + }) + + describe('cost_update', () => { + it('formats cost with token breakdown', () => { + const result = formatProgress({ + type: 'cost_update', + cumulativeCost: { costUsd: 0.0523 }, + tokens: { input: 4200, output: 1100 }, + }, false) + assert.equal(result, ' [cost] $0.0523 (4.2k in / 1.1k out)') + }) + + it('returns null for zero cost', () => { + const result = formatProgress({ + type: 'cost_update', + cumulativeCost: { costUsd: 0 }, + tokens: { input: 0, output: 0 }, + }, false) + assert.equal(result, null) + }) + }) + + describe('extension_ui_request', () => { + it('shows notify with message', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'notify', + message: 'Committed: fix auth', + }, false) + assert.equal(result, '[gsd] Committed: fix auth') + }) + + it('suppresses empty notify', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'notify', + message: '', + }, false) + assert.equal(result, null) + }) + + it('suppresses setStatus (TUI-only)', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'setStatus', + statusKey: 'gsd-auto', + statusText: 'auto', + }, false) + assert.equal(result, null) + }) + + it('suppresses setWidget (TUI-only)', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'setWidget', + widgetKey: 'progress', + }, false) + assert.equal(result, null) + }) + }) + + describe('agent lifecycle', () => { + it('shows agent_start', () => { + assert.equal(formatProgress({ type: 'agent_start' }, false), '[agent] Session started') + }) + + it('shows agent_end', () => { + assert.equal(formatProgress({ type: 'agent_end' }, false), '[agent] Session ended') + }) + }) + + describe('unknown events', () => { + it('returns null', () => { + assert.equal(formatProgress({ type: 'some_random_event' }, false), null) + }) + }) +}) From 666731f56d1049c2998a8c8311757ae3ea57eef5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?T=C3=82CHES?= Date: Fri, 27 Mar 2026 14:29:20 -0600 Subject: [PATCH 14/27] feat: colorized headless verbose output with thinking, phases, cost, and durations (#2886) * feat: colorized headless verbose output with thinking, phases, cost, and durations Overhaul --verbose text output to make agent activity observable: 1. ANSI color for all output categories (tool, agent, gsd, phase, cost, thinking, error) with NO_COLOR and non-TTY support 2. LLM thinking deltas condensed to ~120 char previews between tool calls 3. Phase tracking from setStatus events with statusKey parsing (suppresses empty [status] lines) 4. Cumulative cost shown on agent_end from tracked cost_update events 5. Tool call durations (start/end timestamp tracking) 6. Tool arg summarizer (file paths, bash commands, grep patterns) for context-rich [tool] lines Co-Authored-By: Claude Opus 4.6 (1M context) * test: update headless progress tests for new ProgressContext API and color features - Update formatProgress tests for ProgressContext signature (verbose, toolDuration, lastCost, isError) - Add summarizeToolArgs tests (file paths, bash commands, grep patterns, truncation) - Add formatThinkingLine tests (truncation, whitespace collapsing) - Add formatCostLine tests - Add phase tracking tests (setStatus with statusKey) - Add agent_end cost display tests - 28 tests, all passing --------- Co-authored-by: Claude Opus 4.6 (1M context) --- src/headless-ui.ts | 273 +++++++++++++++++++++------- src/headless.ts | 69 ++++++- src/tests/headless-progress.test.ts | 222 +++++++++++++++------- 3 files changed, 432 insertions(+), 132 deletions(-) diff --git a/src/headless-ui.ts b/src/headless-ui.ts index e99d94755..ca6fa4563 100644 --- a/src/headless-ui.ts +++ b/src/headless-ui.ts @@ -28,6 +28,116 @@ interface ExtensionUIRequest { export type { ExtensionUIRequest } +/** Context passed alongside an event for richer formatting. */ +export interface ProgressContext { + verbose: boolean + toolDuration?: number // ms, for tool_execution_end + lastCost?: { costUsd: number; inputTokens: number; outputTokens: number } + thinkingPreview?: string // accumulated LLM text to show before tool calls + isError?: boolean // tool execution ended with an error +} + +// --------------------------------------------------------------------------- +// ANSI Color Helpers +// --------------------------------------------------------------------------- + +const _c = { + reset: '\x1b[0m', + bold: '\x1b[1m', + dim: '\x1b[2m', + italic: '\x1b[3m', + red: '\x1b[31m', + green: '\x1b[32m', + yellow: '\x1b[33m', + cyan: '\x1b[36m', + gray: '\x1b[90m', +} + +/** Build a no-op color map (all codes empty). */ +function noColor(): typeof _c { + const nc: Record = {} + for (const k of Object.keys(_c)) nc[k] = '' + return nc as typeof _c +} + +const colorsDisabled = !!process.env['NO_COLOR'] || !process.stderr.isTTY +const c: typeof _c = colorsDisabled ? noColor() : _c + +// --------------------------------------------------------------------------- +// Tool-Arg Summarizer +// --------------------------------------------------------------------------- + +/** + * Produce a short human-readable summary of tool arguments. + * Returns a string like "path/to/file.ts" or "grep pattern *.ts" — never the + * full JSON blob. + */ +export function summarizeToolArgs(toolName: unknown, toolInput: unknown): string { + const name = String(toolName ?? '') + const input = (toolInput && typeof toolInput === 'object') ? toolInput as Record : {} + + switch (name) { + case 'Read': + case 'read': + return shortPath(input.file_path) || '' + case 'Write': + case 'write': + return shortPath(input.file_path) || '' + case 'Edit': + case 'edit': + return shortPath(input.file_path) || '' + case 'Bash': + case 'bash': { + const cmd = String(input.command ?? '') + return cmd.length > 80 ? cmd.slice(0, 77) + '...' : cmd + } + case 'Glob': + case 'glob': + return String(input.pattern ?? '') + case 'Grep': + case 'grep': + case 'Search': + case 'search': { + const pat = String(input.pattern ?? '') + const g = input.glob ? ` ${input.glob}` : '' + return `${pat}${g}` + } + case 'Task': + case 'task': { + const desc = String(input.description ?? input.prompt ?? '') + return desc.length > 60 ? desc.slice(0, 57) + '...' : desc + } + default: { + // Fallback: show first string-valued key up to 60 chars + for (const v of Object.values(input)) { + if (typeof v === 'string' && v.length > 0) { + return v.length > 60 ? v.slice(0, 57) + '...' : v + } + } + return '' + } + } +} + +function shortPath(p: unknown): string { + if (typeof p !== 'string') return '' + // Strip common CWD prefix to save space + const cwd = process.cwd() + if (p.startsWith(cwd + '/')) return p.slice(cwd.length + 1) + // Strip /Users/*/Developer/ prefix + return p.replace(/^\/Users\/[^/]+\/Developer\//, '') +} + +// --------------------------------------------------------------------------- +// Format Duration +// --------------------------------------------------------------------------- + +function formatDuration(ms: number): string { + if (ms < 1000) return `${ms}ms` + const s = (ms / 1000).toFixed(1) + return `${s}s` +} + // --------------------------------------------------------------------------- // Extension UI Auto-Responder // --------------------------------------------------------------------------- @@ -78,55 +188,78 @@ export function handleExtensionUIRequest( // Progress Formatter // --------------------------------------------------------------------------- -export function formatProgress(event: Record, verbose: boolean): string | null { +export function formatProgress(event: Record, ctx: ProgressContext): string | null { const type = String(event.type ?? '') + // Emit accumulated thinking preview before tool calls + if (ctx.thinkingPreview) { + // thinkingPreview is handled by the caller in headless.ts — it prepends + // the thinking line before the current event's line. We return the thinking + // line as a prefix joined with newline. + } + switch (type) { case 'tool_execution_start': { + if (!ctx.verbose) return null const name = String(event.toolName ?? 'unknown') - const summary = summarizeToolArgs(name, event.args as Record | undefined) - return summary ? ` [tool] ${name} ${summary}` : ` [tool] ${name}` + const args = summarizeToolArgs(event.toolName, event.args) + const argStr = args ? ` ${c.dim}${args}${c.reset}` : '' + return ` ${c.dim}[tool]${c.reset} ${name}${argStr}` } case 'tool_execution_end': { - if (verbose) { - const name = String(event.toolName ?? 'unknown') - const isError = Boolean(event.isError) - return isError ? ` [tool] ${name} ✗ error` : null + if (!ctx.verbose) return null + const name = String(event.toolName ?? 'unknown') + const durationStr = ctx.toolDuration != null ? ` ${c.dim}${formatDuration(ctx.toolDuration)}${c.reset}` : '' + if (ctx.isError) { + return ` ${c.red}[tool] ${name} error${c.reset}${durationStr}` } - // In non-verbose, only surface errors - if (event.isError) { - const name = String(event.toolName ?? 'unknown') - return ` [tool] ${name} ✗ error` - } - return null - } - - case 'cost_update': { - const cumCost = event.cumulativeCost as Record | undefined - const costUsd = Number(cumCost?.costUsd ?? 0) - if (costUsd > 0) { - const tokens = event.tokens as Record | undefined - const inK = tokens ? (tokens.input / 1000).toFixed(1) : '?' - const outK = tokens ? (tokens.output / 1000).toFixed(1) : '?' - return ` [cost] $${costUsd.toFixed(4)} (${inK}k in / ${outK}k out)` - } - return null + return ` ${c.dim}[tool] ${name} done${c.reset}${durationStr}` } case 'agent_start': - return '[agent] Session started' + return `${c.dim}[agent] Session started${c.reset}` - case 'agent_end': - return '[agent] Session ended' - - case 'extension_ui_request': - if (event.method === 'notify') { - const msg = String(event.message ?? '') - return msg ? `[gsd] ${msg}` : null + case 'agent_end': { + let line = `${c.dim}[agent] Session ended${c.reset}` + if (ctx.lastCost) { + const cost = `$${ctx.lastCost.costUsd.toFixed(4)}` + const tokens = `${ctx.lastCost.inputTokens + ctx.lastCost.outputTokens} tokens` + line += ` ${c.dim}(${cost}, ${tokens})${c.reset}` } - // setStatus / setWidget are TUI-specific — suppress in text mode + return line + } + + case 'extension_ui_request': { + const method = String(event.method ?? '') + + if (method === 'notify') { + const msg = String(event.message ?? '') + if (!msg) return null + // Bold important notifications + const isImportant = /^(committed:|verification gate:|milestone|blocked:)/i.test(msg) + return isImportant + ? `${c.bold}[gsd] ${msg}${c.reset}` + : `[gsd] ${msg}` + } + + if (method === 'setStatus') { + // Parse statusKey for phase transitions + const statusKey = String(event.statusKey ?? '') + const msg = String(event.message ?? '') + if (!statusKey && !msg) return null // suppress empty status lines + // Show meaningful phase transitions + if (statusKey) { + const label = parsePhaseLabel(statusKey, msg) + if (label) return `${c.cyan}[phase] ${label}${c.reset}` + } + // Fallback: show message if non-empty + if (msg) return `${c.cyan}[phase] ${msg}${c.reset}` + return null + } + return null + } default: return null @@ -134,40 +267,52 @@ export function formatProgress(event: Record, verbose: boolean) } /** - * Extract a short summary from tool arguments for display. - * Returns null if nothing useful can be summarized. + * Format a thinking preview line from accumulated LLM text deltas. */ -function summarizeToolArgs(toolName: string, args: Record | undefined): string | null { - if (!args) return null +export function formatThinkingLine(text: string): string { + const trimmed = text.replace(/\s+/g, ' ').trim() + const truncated = trimmed.length > 120 ? trimmed.slice(0, 117) + '...' : trimmed + return `${c.dim}${c.italic}[thinking] ${truncated}${c.reset}` +} - switch (toolName) { - case 'Read': - case 'read': - return args.path ? String(args.path) : null - case 'Write': - case 'write': - return args.path ? String(args.path) : null - case 'Edit': - case 'edit': - return args.path ? String(args.path) : null - case 'Bash': - case 'bash': { - const cmd = String(args.command ?? '') - return cmd.length > 80 ? cmd.slice(0, 77) + '...' : cmd || null - } - case 'Grep': - case 'grep': - return args.pattern ? `/${args.pattern}/` + (args.path ? ` in ${args.path}` : '') : null - case 'find': - return args.pattern ? String(args.pattern) + (args.path ? ` in ${args.path}` : '') : null - case 'lsp': - return args.action ? String(args.action) + (args.symbol ? ` ${args.symbol}` : '') : null - default: { - // For GSD tools, show the first string arg that looks like an ID or path - const first = Object.values(args).find(v => typeof v === 'string' && String(v).length < 80) - return first ? String(first) : null +/** + * Format a cost line (used for periodic cost updates in verbose mode). + */ +export function formatCostLine(costUsd: number, inputTokens: number, outputTokens: number): string { + return `${c.dim}[cost] $${costUsd.toFixed(4)} (${inputTokens + outputTokens} tokens)${c.reset}` +} + +// --------------------------------------------------------------------------- +// Phase Label Parser +// --------------------------------------------------------------------------- + +/** + * Parse a statusKey into a human-readable phase label. + * statusKey format varies but common patterns: + * "milestone:M1", "slice:S1.1", "task:T1.1.1", "phase:discuss", etc. + */ +function parsePhaseLabel(statusKey: string, message: string): string | null { + // Direct phase/milestone/slice/task keys + const parts = statusKey.split(':') + if (parts.length >= 2) { + const [kind, value] = parts + switch (kind.toLowerCase()) { + case 'milestone': + return `Milestone ${value}${message ? ' -- ' + message : ''}` + case 'slice': + return `Slice ${value}${message ? ' -- ' + message : ''}` + case 'task': + return `Task ${value}${message ? ' -- ' + message : ''}` + case 'phase': + return `Phase: ${value}${message ? ' -- ' + message : ''}` + default: + return `${kind}: ${value}${message ? ' -- ' + message : ''}` } } + + // Single-word status keys with a message + if (message) return `${statusKey}: ${message}` + return statusKey || null } // --------------------------------------------------------------------------- diff --git a/src/headless.ts b/src/headless.ts index 5e54cac64..cadcc990d 100644 --- a/src/headless.ts +++ b/src/headless.ts @@ -43,9 +43,10 @@ import { VALID_OUTPUT_FORMATS } from './headless-types.js' import { handleExtensionUIRequest, formatProgress, + formatThinkingLine, startSupervisedStdinReader, } from './headless-ui.js' -import type { ExtensionUIRequest } from './headless-ui.js' +import type { ExtensionUIRequest, ProgressContext } from './headless-ui.js' import { loadContext, @@ -370,6 +371,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): let cumulativeCacheWriteTokens = 0 let lastSessionId: string | undefined + // Verbose text-mode state + const toolStartTimes = new Map() + let lastCostData: { costUsd: number; inputTokens: number; outputTokens: number } | undefined + let thinkingBuffer = '' + // Emit HeadlessJsonResult to stdout for --output-format json batch mode function emitBatchJsonResult(): void { if (options.outputFormat !== 'json') return @@ -504,8 +510,65 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number): lastSessionId = String((eventObj as Record).sessionId ?? '') } } else if (!options.json) { - // Progress output to stderr - const line = formatProgress(eventObj, !!options.verbose) + // Progress output to stderr with verbose state tracking + const eventType = String(eventObj.type ?? '') + + // Track cost_update events for agent_end summary + if (eventType === 'cost_update') { + const data = eventObj as Record + const cumCost = data.cumulativeCost as Record | undefined + if (cumCost) { + const tokens = data.tokens as Record | undefined + lastCostData = { + costUsd: Number(cumCost.costUsd ?? 0), + inputTokens: tokens?.input ?? 0, + outputTokens: tokens?.output ?? 0, + } + } + } + + // Accumulate thinking text from message_update text_delta events + if (eventType === 'message_update') { + const ame = eventObj.assistantMessageEvent as Record | undefined + if (ame?.type === 'text_delta') { + thinkingBuffer += String(ame.text ?? '') + } + } + + // Track tool execution start timestamps + if (eventType === 'tool_execution_start') { + const toolCallId = String(eventObj.toolCallId ?? eventObj.id ?? '') + if (toolCallId) toolStartTimes.set(toolCallId, Date.now()) + } + + // Flush thinking buffer before tool calls or message end + if (options.verbose && thinkingBuffer.trim() && + (eventType === 'tool_execution_start' || eventType === 'message_end')) { + process.stderr.write(formatThinkingLine(thinkingBuffer) + '\n') + thinkingBuffer = '' + } + + // Compute tool duration for tool_execution_end + let toolDuration: number | undefined + let isToolError = false + if (eventType === 'tool_execution_end') { + const toolCallId = String(eventObj.toolCallId ?? eventObj.id ?? '') + const startTime = toolStartTimes.get(toolCallId) + if (startTime) { + toolDuration = Date.now() - startTime + toolStartTimes.delete(toolCallId) + } + isToolError = eventObj.isError === true || eventObj.error != null + } + + const ctx: ProgressContext = { + verbose: !!options.verbose, + toolDuration, + isError: isToolError, + lastCost: eventType === 'agent_end' ? lastCostData : undefined, + } + + const line = formatProgress(eventObj, ctx) if (line) process.stderr.write(line + '\n') } diff --git a/src/tests/headless-progress.test.ts b/src/tests/headless-progress.test.ts index 0e55cbafa..febae1aa4 100644 --- a/src/tests/headless-progress.test.ts +++ b/src/tests/headless-progress.test.ts @@ -1,93 +1,111 @@ import { describe, it } from 'node:test' import assert from 'node:assert/strict' -import { formatProgress } from '../headless-ui.js' +import { formatProgress, formatThinkingLine, formatCostLine, summarizeToolArgs } from '../headless-ui.js' +import type { ProgressContext } from '../headless-ui.js' + +// Tests run with NO_COLOR or non-TTY stderr, so ANSI codes are empty strings. +// We test content, not escape sequences. + +function ctx(overrides: Partial = {}): ProgressContext { + return { verbose: true, ...overrides } +} describe('formatProgress', () => { describe('tool_execution_start', () => { - it('shows tool name and summarized args', () => { + it('shows tool name and summarized args in verbose mode', () => { const result = formatProgress({ type: 'tool_execution_start', toolName: 'bash', args: { command: 'npm run build' }, - }, false) - assert.equal(result, ' [tool] bash npm run build') + }, ctx()) + assert.ok(result) + assert.ok(result.includes('bash')) + assert.ok(result.includes('npm run build')) }) it('shows Read with file path', () => { const result = formatProgress({ type: 'tool_execution_start', toolName: 'Read', - args: { path: 'src/main.ts' }, - }, false) - assert.equal(result, ' [tool] Read src/main.ts') + args: { file_path: 'src/main.ts' }, + }, ctx()) + assert.ok(result) + assert.ok(result.includes('Read')) + assert.ok(result.includes('src/main.ts')) }) - it('shows grep with pattern and path', () => { - const result = formatProgress({ - type: 'tool_execution_start', - toolName: 'grep', - args: { pattern: 'TODO', path: 'src/' }, - }, false) - assert.equal(result, ' [tool] grep /TODO/ in src/') - }) - - it('truncates long bash commands', () => { - const longCmd = 'a'.repeat(100) + it('returns null in non-verbose mode', () => { const result = formatProgress({ type: 'tool_execution_start', toolName: 'bash', - args: { command: longCmd }, - }, false) - assert.ok(result!.endsWith('...')) - assert.ok(result!.length < 100) + args: { command: 'npm run build' }, + }, ctx({ verbose: false })) + assert.equal(result, null) }) it('shows tool name alone when no args', () => { const result = formatProgress({ type: 'tool_execution_start', toolName: 'unknown_tool', - }, false) - assert.equal(result, ' [tool] unknown_tool') + }, ctx()) + assert.ok(result) + assert.ok(result.includes('unknown_tool')) }) }) describe('tool_execution_end', () => { - it('shows error in non-verbose mode', () => { + it('shows error with duration in verbose mode', () => { const result = formatProgress({ type: 'tool_execution_end', toolName: 'bash', - isError: true, - }, false) - assert.equal(result, ' [tool] bash ✗ error') + }, ctx({ isError: true, toolDuration: 1500 })) + assert.ok(result) + assert.ok(result.includes('bash')) + assert.ok(result.includes('error')) + assert.ok(result.includes('1.5s')) }) - it('suppresses success in non-verbose mode', () => { + it('shows done with duration in verbose mode', () => { + const result = formatProgress({ + type: 'tool_execution_end', + toolName: 'read', + }, ctx({ toolDuration: 50 })) + assert.ok(result) + assert.ok(result.includes('done')) + assert.ok(result.includes('50ms')) + }) + + it('returns null in non-verbose mode', () => { const result = formatProgress({ type: 'tool_execution_end', toolName: 'bash', isError: false, - }, false) + }, ctx({ verbose: false })) assert.equal(result, null) }) }) - describe('cost_update', () => { - it('formats cost with token breakdown', () => { - const result = formatProgress({ - type: 'cost_update', - cumulativeCost: { costUsd: 0.0523 }, - tokens: { input: 4200, output: 1100 }, - }, false) - assert.equal(result, ' [cost] $0.0523 (4.2k in / 1.1k out)') + describe('agent lifecycle', () => { + it('shows agent_start', () => { + const result = formatProgress({ type: 'agent_start' }, ctx()) + assert.ok(result) + assert.ok(result.includes('Session started')) }) - it('returns null for zero cost', () => { - const result = formatProgress({ - type: 'cost_update', - cumulativeCost: { costUsd: 0 }, - tokens: { input: 0, output: 0 }, - }, false) - assert.equal(result, null) + it('shows agent_end', () => { + const result = formatProgress({ type: 'agent_end' }, ctx()) + assert.ok(result) + assert.ok(result.includes('Session ended')) + }) + + it('shows agent_end with cost', () => { + const result = formatProgress({ type: 'agent_end' }, ctx({ + lastCost: { costUsd: 0.42, inputTokens: 10000, outputTokens: 500 }, + })) + assert.ok(result) + assert.ok(result.includes('Session ended')) + assert.ok(result.includes('$0.42')) + assert.ok(result.includes('10500 tokens')) }) }) @@ -96,9 +114,20 @@ describe('formatProgress', () => { const result = formatProgress({ type: 'extension_ui_request', method: 'notify', - message: 'Committed: fix auth', - }, false) - assert.equal(result, '[gsd] Committed: fix auth') + message: 'Auto-mode started', + }, ctx()) + assert.ok(result) + assert.ok(result.includes('Auto-mode started')) + }) + + it('bolds important notifications', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'notify', + message: 'Committed: fix auth flow', + }, ctx()) + assert.ok(result) + assert.ok(result.includes('Committed: fix auth flow')) }) it('suppresses empty notify', () => { @@ -106,43 +135,106 @@ describe('formatProgress', () => { type: 'extension_ui_request', method: 'notify', message: '', - }, false) + }, ctx()) assert.equal(result, null) }) - it('suppresses setStatus (TUI-only)', () => { + it('suppresses empty setStatus', () => { const result = formatProgress({ type: 'extension_ui_request', method: 'setStatus', - statusKey: 'gsd-auto', - statusText: 'auto', - }, false) + statusKey: '', + message: '', + }, ctx()) assert.equal(result, null) }) + it('shows setStatus with statusKey as phase', () => { + const result = formatProgress({ + type: 'extension_ui_request', + method: 'setStatus', + statusKey: 'milestone:M001', + message: 'Hello World CLI', + }, ctx()) + assert.ok(result) + assert.ok(result.includes('Milestone')) + assert.ok(result.includes('M001')) + }) + it('suppresses setWidget (TUI-only)', () => { const result = formatProgress({ type: 'extension_ui_request', method: 'setWidget', widgetKey: 'progress', - }, false) + }, ctx()) assert.equal(result, null) }) }) - describe('agent lifecycle', () => { - it('shows agent_start', () => { - assert.equal(formatProgress({ type: 'agent_start' }, false), '[agent] Session started') - }) - - it('shows agent_end', () => { - assert.equal(formatProgress({ type: 'agent_end' }, false), '[agent] Session ended') - }) - }) - describe('unknown events', () => { it('returns null', () => { - assert.equal(formatProgress({ type: 'some_random_event' }, false), null) + assert.equal(formatProgress({ type: 'some_random_event' }, ctx()), null) }) }) }) + +describe('summarizeToolArgs', () => { + it('extracts file_path for Read', () => { + assert.equal(summarizeToolArgs('Read', { file_path: 'src/index.ts' }), 'src/index.ts') + }) + + it('extracts file_path for write', () => { + assert.equal(summarizeToolArgs('write', { file_path: '/tmp/out.json' }), '/tmp/out.json') + }) + + it('extracts command for bash', () => { + assert.equal(summarizeToolArgs('bash', { command: 'ls -la' }), 'ls -la') + }) + + it('truncates long bash commands', () => { + const longCmd = 'a'.repeat(100) + const result = summarizeToolArgs('bash', { command: longCmd }) + assert.ok(result.endsWith('...')) + assert.ok(result.length < 100) + }) + + it('extracts pattern for grep', () => { + const result = summarizeToolArgs('grep', { pattern: 'TODO', glob: '*.ts' }) + assert.equal(result, 'TODO *.ts') + }) + + it('returns first string value for unknown tools', () => { + assert.equal(summarizeToolArgs('gsd_task_complete', { taskId: 'T01' }), 'T01') + }) + + it('returns empty string for no args', () => { + assert.equal(summarizeToolArgs('unknown', {}), '') + }) +}) + +describe('formatThinkingLine', () => { + it('formats short text', () => { + const result = formatThinkingLine('Analyzing the codebase') + assert.ok(result.includes('[thinking]')) + assert.ok(result.includes('Analyzing the codebase')) + }) + + it('truncates long text to ~120 chars', () => { + const longText = 'word '.repeat(50) // 250 chars + const result = formatThinkingLine(longText) + assert.ok(result.includes('...')) + }) + + it('collapses whitespace', () => { + const result = formatThinkingLine('line one\n\nline two\ttab') + assert.ok(result.includes('line one line two tab')) + }) +}) + +describe('formatCostLine', () => { + it('formats cost with token count', () => { + const result = formatCostLine(0.0523, 4200, 1100) + assert.ok(result.includes('$0.0523')) + assert.ok(result.includes('5300 tokens')) + }) +}) From 2175f59522c9a199026a7bc9964face14b16fba3 Mon Sep 17 00:00:00 2001 From: Jeremy McSpadden Date: Fri, 27 Mar 2026 15:29:38 -0500 Subject: [PATCH 15/27] fix(contracts): add isWorkspaceEvent guard + close routeLiveInteractionEvent exhaustiveness gap (#2878) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes two contract violations found in audit (closes #2875): 1. `isWorkspaceEvent()` type guard added next to WorkspaceEvent type definition. Applied at stream.onmessage JSON.parse boundary — replaces unsafe `as WorkspaceEvent` cast with validated parse + explicit error path for malformed payloads. 2. `routeLiveInteractionEvent()` switch extended with explicit cases for all three previously unhandled WorkspaceEvent variants: - bridge_status: handled upstream with early return, never reaches router - live_state_invalidation: handled upstream via handleLiveStateInvalidation - extension_error: terminal line produced by summarizeEvent, no live state update needed --- web/lib/gsd-workspace-store.tsx | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/web/lib/gsd-workspace-store.tsx b/web/lib/gsd-workspace-store.tsx index d22c16dc0..a34d91cf1 100644 --- a/web/lib/gsd-workspace-store.tsx +++ b/web/lib/gsd-workspace-store.tsx @@ -470,6 +470,10 @@ export type WorkspaceEvent = | TurnEndEvent | ({ type: Exclude; [key: string]: unknown } & Record) +export function isWorkspaceEvent(value: unknown): value is WorkspaceEvent { + return value !== null && typeof value === "object" && typeof (value as Record).type === "string" +} + export interface WorkspaceCommandResponse { type: "response" command: string @@ -4866,8 +4870,15 @@ export class GSDWorkspaceStore { stream.onmessage = (message) => { try { - const payload = JSON.parse(message.data) as WorkspaceEvent - this.handleEvent(payload) + const parsed: unknown = JSON.parse(message.data) + if (!isWorkspaceEvent(parsed)) { + this.patchState({ + lastClientError: "Malformed event received from stream", + terminalLines: withTerminalLine(this.state.terminalLines, createTerminalLine("error", "Malformed event received from stream")), + }) + return + } + this.handleEvent(parsed) } catch (error) { const text = normalizeClientError(error) this.patchState({ @@ -4945,6 +4956,15 @@ export class GSDWorkspaceStore { case "tool_execution_end": this.handleToolExecutionEnd(event as ToolExecutionEndEvent) break + case "bridge_status": + // Handled upstream in handleEvent with early return — never reaches here + break + case "live_state_invalidation": + // Handled upstream in handleEvent via handleLiveStateInvalidation — no live interaction state update needed + break + case "extension_error": + // Terminal line produced by summarizeEvent — no live interaction state update needed + break } } From 24c4e393a7aeced870733637361cace29ca3f9b3 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 21:30:13 +0100 Subject: [PATCH 16/27] fix(cli): let gsd update bypass version mismatch gate (#2845) * test(integration): suppress npm pack buffer overflows * fix(cli): let gsd update bypass version mismatch gate --- src/cli.ts | 45 ++++++++++++++++++------------------- src/tests/app-smoke.test.ts | 13 +++++++++++ 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/src/cli.ts b/src/cli.ts index 467760153..a5b255fa9 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -133,21 +133,6 @@ const isPrintMode = cliFlags.print || cliFlags.mode !== undefined // Early resource-skew check — must run before TTY gate so version mismatch // errors surface even in non-TTY environments. -exitIfManagedResourcesAreNewer(agentDir) - -// Early TTY check — must come before heavy initialization to avoid dangling -// handles that prevent process.exit() from completing promptly. -const hasSubcommand = cliFlags.messages.length > 0 -if (!process.stdin.isTTY && !isPrintMode && !hasSubcommand && !cliFlags.listModels && !cliFlags.web) { - process.stderr.write('[gsd] Error: Interactive mode requires a terminal (TTY).\n') - process.stderr.write('[gsd] Non-interactive alternatives:\n') - process.stderr.write('[gsd] gsd --print "your message" Single-shot prompt\n') - process.stderr.write('[gsd] gsd --mode rpc JSON-RPC over stdin/stdout\n') - process.stderr.write('[gsd] gsd --mode mcp MCP server over stdin/stdout\n') - process.stderr.write('[gsd] gsd --mode text "message" Text output mode\n') - process.exit(1) -} - async function ensureRtkBootstrap(): Promise { if ((ensureRtkBootstrap as { _done?: boolean })._done) return @@ -170,6 +155,28 @@ async function ensureRtkBootstrap(): Promise { } } +// `gsd update` — update to the latest version via npm +if (cliFlags.messages[0] === 'update') { + const { runUpdate } = await import('./update-cmd.js') + await runUpdate() + process.exit(0) +} + +exitIfManagedResourcesAreNewer(agentDir) + +// Early TTY check — must come before heavy initialization to avoid dangling +// handles that prevent process.exit() from completing promptly. +const hasSubcommand = cliFlags.messages.length > 0 +if (!process.stdin.isTTY && !isPrintMode && !hasSubcommand && !cliFlags.listModels && !cliFlags.web) { + process.stderr.write('[gsd] Error: Interactive mode requires a terminal (TTY).\n') + process.stderr.write('[gsd] Non-interactive alternatives:\n') + process.stderr.write('[gsd] gsd --print "your message" Single-shot prompt\n') + process.stderr.write('[gsd] gsd --mode rpc JSON-RPC over stdin/stdout\n') + process.stderr.write('[gsd] gsd --mode mcp MCP server over stdin/stdout\n') + process.stderr.write('[gsd] gsd --mode text "message" Text output mode\n') + process.exit(1) +} + // `gsd --help` — show subcommand-specific help const subcommand = cliFlags.messages[0] if (subcommand && process.argv.includes('--help')) { @@ -199,13 +206,6 @@ if (cliFlags.messages[0] === 'config') { process.exit(0) } -// `gsd update` — update to the latest version via npm -if (cliFlags.messages[0] === 'update') { - const { runUpdate } = await import('./update-cmd.js') - await runUpdate() - process.exit(0) -} - // `gsd web stop [path|all]` — stop web server before anything else if (cliFlags.messages[0] === 'web' && cliFlags.messages[1] === 'stop') { const webFlags = parseWebCliArgs(process.argv) @@ -688,4 +688,3 @@ const interactiveMode = new InteractiveMode(session) markStartup('InteractiveMode') printStartupTimings() await interactiveMode.run() - diff --git a/src/tests/app-smoke.test.ts b/src/tests/app-smoke.test.ts index d68512937..8a43d8cbb 100644 --- a/src/tests/app-smoke.test.ts +++ b/src/tests/app-smoke.test.ts @@ -187,6 +187,19 @@ test("loader MIN_NODE_MAJOR matches package.json engines field", () => { `loader MIN_NODE_MAJOR (${loaderMin}) must match package.json engines.node (>=${engineMin}.0.0)`); }); +test("cli.ts lets gsd update bypass the managed-resource mismatch gate", () => { + const cliSrc = readFileSync(join(projectRoot, "src", "cli.ts"), "utf-8"); + const updateBranchIndex = cliSrc.indexOf("if (cliFlags.messages[0] === 'update')") + const mismatchGateIndex = cliSrc.indexOf("exitIfManagedResourcesAreNewer(agentDir)") + + assert.ok(updateBranchIndex !== -1, "cli.ts contains an update branch") + assert.ok(mismatchGateIndex !== -1, "cli.ts contains the managed-resource mismatch gate") + assert.ok( + updateBranchIndex < mismatchGateIndex, + "gsd update must run before the managed-resource mismatch gate", + ) +}); + // ═══════════════════════════════════════════════════════════════════════════ // 3. resource-loader syncs bundled resources // ═══════════════════════════════════════════════════════════════════════════ From a6bb48e82d0120bff075c9f4e27b3e5c3f7c5d04 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 27 Mar 2026 20:44:58 +0000 Subject: [PATCH 17/27] release: v2.55.0 --- CHANGELOG.md | 19 ++++++++++++++++++- native/npm/darwin-arm64/package.json | 2 +- native/npm/darwin-x64/package.json | 2 +- native/npm/linux-arm64-gnu/package.json | 2 +- native/npm/linux-x64-gnu/package.json | 2 +- native/npm/win32-x64-msvc/package.json | 2 +- package.json | 2 +- packages/pi-coding-agent/package.json | 2 +- pkg/package.json | 2 +- 9 files changed, 26 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a5e7be9eb..2dc4f3e2e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,22 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +## [2.55.0] - 2026-03-27 + +### Added +- colorized headless verbose output with thinking, phases, cost, and durations (#2886) +- headless text mode observability + skip UAT pause (#2867) + +### Fixed +- **cli**: let gsd update bypass version mismatch gate (#2845) +- **contracts**: add isWorkspaceEvent guard + close routeLiveInteractionEvent exhaustiveness gap (#2878) +- **gsd**: use project root for prior-slice dispatch guard (#2863) +- **gsd**: include queue context in milestone planning prompts (#2846) +- detect monorepo roots in project discovery to prevent workspace fragmentation (#2849) +- **bg-shell**: recover from deleted cwd in timers (#2850) +- **gsd**: enable dynamic routing without models section (#2851) +- **interactive**: fully remove providers from /providers (#2852) + ## [2.54.0] - 2026-03-27 ### Added @@ -2079,7 +2095,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ### Changed - License updated to MIT -[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.54.0...HEAD +[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.55.0...HEAD +[2.55.0]: https://github.com/gsd-build/gsd-2/compare/v2.54.0...v2.55.0 [2.54.0]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...v2.54.0 [2.53.0]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...v2.53.0 [2.52.0]: https://github.com/gsd-build/gsd-2/compare/v2.51.0...v2.52.0 diff --git a/native/npm/darwin-arm64/package.json b/native/npm/darwin-arm64/package.json index a318abc8b..3216cd21f 100644 --- a/native/npm/darwin-arm64/package.json +++ b/native/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-arm64", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD native engine binary for macOS ARM64", "os": [ "darwin" diff --git a/native/npm/darwin-x64/package.json b/native/npm/darwin-x64/package.json index e4b5cafd8..f30ce8379 100644 --- a/native/npm/darwin-x64/package.json +++ b/native/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-x64", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD native engine binary for macOS Intel", "os": [ "darwin" diff --git a/native/npm/linux-arm64-gnu/package.json b/native/npm/linux-arm64-gnu/package.json index a696bcffe..2d201f5ee 100644 --- a/native/npm/linux-arm64-gnu/package.json +++ b/native/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-arm64-gnu", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD native engine binary for Linux ARM64 (glibc)", "os": [ "linux" diff --git a/native/npm/linux-x64-gnu/package.json b/native/npm/linux-x64-gnu/package.json index 5476abbe3..e87092d07 100644 --- a/native/npm/linux-x64-gnu/package.json +++ b/native/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-x64-gnu", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD native engine binary for Linux x64 (glibc)", "os": [ "linux" diff --git a/native/npm/win32-x64-msvc/package.json b/native/npm/win32-x64-msvc/package.json index 0281d3215..159255c52 100644 --- a/native/npm/win32-x64-msvc/package.json +++ b/native/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-win32-x64-msvc", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD native engine binary for Windows x64 (MSVC)", "os": [ "win32" diff --git a/package.json b/package.json index df45caf1c..0c925eb9b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "gsd-pi", - "version": "2.54.0", + "version": "2.55.0", "description": "GSD — Get Shit Done coding agent", "license": "MIT", "repository": { diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json index f986489ec..82bcd709a 100644 --- a/packages/pi-coding-agent/package.json +++ b/packages/pi-coding-agent/package.json @@ -1,6 +1,6 @@ { "name": "@gsd/pi-coding-agent", - "version": "2.54.0", + "version": "2.55.0", "description": "Coding agent CLI (vendored from pi-mono)", "type": "module", "piConfig": { diff --git a/pkg/package.json b/pkg/package.json index 73f7bf62c..8387ef366 100644 --- a/pkg/package.json +++ b/pkg/package.json @@ -1,6 +1,6 @@ { "name": "@glittercowboy/gsd", - "version": "2.54.0", + "version": "2.55.0", "piConfig": { "name": "gsd", "configDir": ".gsd" From a0b9a85a20afe420c175b8088fe945092dafda39 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 21:47:14 +0100 Subject: [PATCH 18/27] fix(gsd): preserve auto start model through discuss (#2837) --- src/resources/extensions/gsd/auto-start.ts | 18 ++++++++---- .../tests/auto-start-model-capture.test.ts | 28 +++++++++++++++++++ 2 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 src/resources/extensions/gsd/tests/auto-start-model-capture.test.ts diff --git a/src/resources/extensions/gsd/auto-start.ts b/src/resources/extensions/gsd/auto-start.ts index f0b45a04e..38012e2c2 100644 --- a/src/resources/extensions/gsd/auto-start.ts +++ b/src/resources/extensions/gsd/auto-start.ts @@ -131,6 +131,15 @@ export async function bootstrapAutoSession( return false; } + // Capture the user's session model before guided-flow dispatch can apply a + // phase-specific planning model for a discuss turn (#2829). + const startModelSnapshot = ctx.model + ? { + provider: ctx.model.provider, + id: ctx.model.id, + } + : null; + try { // Validate GSD_PROJECT_ID early so the user gets immediate feedback const customProjectId = process.env.GSD_PROJECT_ID; @@ -576,12 +585,11 @@ export async function bootstrapAutoSession( // Initialize routing history initRoutingHistory(s.basePath); - // Capture session's model at auto-mode start (#650) - const currentModel = ctx.model; - if (currentModel) { + // Restore the model that was active when auto bootstrap began (#650, #2829). + if (startModelSnapshot) { s.autoModeStartModel = { - provider: currentModel.provider, - id: currentModel.id, + provider: startModelSnapshot.provider, + id: startModelSnapshot.id, }; } diff --git a/src/resources/extensions/gsd/tests/auto-start-model-capture.test.ts b/src/resources/extensions/gsd/tests/auto-start-model-capture.test.ts new file mode 100644 index 000000000..3daa00f3f --- /dev/null +++ b/src/resources/extensions/gsd/tests/auto-start-model-capture.test.ts @@ -0,0 +1,28 @@ +import test from "node:test"; +import assert from "node:assert/strict"; +import { readFileSync } from "node:fs"; +import { join } from "node:path"; + +const sourcePath = join(import.meta.dirname, "..", "auto-start.ts"); +const source = readFileSync(sourcePath, "utf-8"); + +test("bootstrapAutoSession snapshots ctx.model before guided-flow entry (#2829)", () => { + const snapshotIdx = source.indexOf("const startModelSnapshot = ctx.model"); + assert.ok(snapshotIdx > -1, "auto-start.ts should snapshot ctx.model at bootstrap start"); + + const firstDiscussIdx = source.indexOf('await showSmartEntry(ctx, pi, base, { step: requestedStepMode });'); + assert.ok(firstDiscussIdx > -1, "auto-start.ts should route through showSmartEntry during guided flow"); + + assert.ok( + snapshotIdx < firstDiscussIdx, + "auto-start.ts must capture the start model before guided-flow can mutate ctx.model", + ); +}); + +test("bootstrapAutoSession restores autoModeStartModel from the early snapshot (#2829)", () => { + const assignmentIdx = source.indexOf("s.autoModeStartModel = {"); + assert.ok(assignmentIdx > -1, "auto-start.ts should assign autoModeStartModel"); + + const snapshotRefIdx = source.indexOf("provider: startModelSnapshot.provider", assignmentIdx); + assert.ok(snapshotRefIdx > -1, "autoModeStartModel should be restored from startModelSnapshot"); +}); From cedf6a558d51a871de04c1325594e809b18252ce Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 21:47:44 +0100 Subject: [PATCH 19/27] fix(web): improve light theme terminal contrast (#2819) Unify the Power Mode xterm light palette behind a shared helper and replace low-contrast ANSI white/yellow entries with contrast-safe values. Add a regression test that guards both the readable light-theme palette and the shared helper wiring so the duplicated terminal palettes do not drift again. Closes #2810 --- src/tests/xterm-theme.test.ts | 57 +++++++++++++++ web/components/gsd/main-session-terminal.tsx | 70 +------------------ web/components/gsd/shell-terminal.tsx | 73 +------------------- web/lib/xterm-theme.ts | 70 +++++++++++++++++++ 4 files changed, 129 insertions(+), 141 deletions(-) create mode 100644 src/tests/xterm-theme.test.ts create mode 100644 web/lib/xterm-theme.ts diff --git a/src/tests/xterm-theme.test.ts b/src/tests/xterm-theme.test.ts new file mode 100644 index 000000000..b3f419be3 --- /dev/null +++ b/src/tests/xterm-theme.test.ts @@ -0,0 +1,57 @@ +import test from "node:test"; +import assert from "node:assert/strict"; +import { readFileSync } from "node:fs"; +import { resolve } from "node:path"; + +const { getXtermTheme } = await import("../../web/lib/xterm-theme.ts"); + +function hexToRgb(hex: string): [number, number, number] { + const normalized = hex.replace("#", ""); + const value = normalized.length === 3 + ? normalized.split("").map((char) => char + char).join("") + : normalized; + const int = Number.parseInt(value, 16); + return [(int >> 16) & 255, (int >> 8) & 255, int & 255]; +} + +function srgbToLinear(channel: number): number { + const normalized = channel / 255; + return normalized <= 0.04045 + ? normalized / 12.92 + : ((normalized + 0.055) / 1.055) ** 2.4; +} + +function contrastRatio(foreground: string, background: string): number { + const luminance = (hex: string) => { + const [r, g, b] = hexToRgb(hex).map(srgbToLinear); + return 0.2126 * r + 0.7152 * g + 0.0722 * b; + }; + const [lighter, darker] = [luminance(foreground), luminance(background)].sort((a, b) => b - a); + return (lighter + 0.05) / (darker + 0.05); +} + +test("light xterm palette keeps warning and ANSI white entries readable", () => { + const theme = getXtermTheme(false); + + assert.ok(contrastRatio(theme.foreground, theme.background) >= 14, "foreground should remain highly legible"); + assert.ok(contrastRatio(theme.yellow, theme.background) >= 4.5, "yellow should meet readable contrast"); + assert.ok(contrastRatio(theme.brightYellow, theme.background) >= 4.5, "bright yellow should meet readable contrast"); + assert.ok(contrastRatio(theme.white, theme.background) >= 4.5, "white should stay readable on light background"); + assert.ok(contrastRatio(theme.brightWhite, theme.background) >= 4.5, "bright white should stay readable on light background"); +}); + +test("terminal components share the central xterm theme helper", () => { + const shellSource = readFileSync( + resolve(import.meta.dirname, "../../web/components/gsd/shell-terminal.tsx"), + "utf8", + ); + const mainSource = readFileSync( + resolve(import.meta.dirname, "../../web/components/gsd/main-session-terminal.tsx"), + "utf8", + ); + + assert.match(shellSource, /from \"@\/lib\/xterm-theme\"/); + assert.match(mainSource, /from \"@\/lib\/xterm-theme\"/); + assert.doesNotMatch(shellSource, /const XTERM_LIGHT_THEME =/); + assert.doesNotMatch(mainSource, /const XTERM_LIGHT_THEME =/); +}); diff --git a/web/components/gsd/main-session-terminal.tsx b/web/components/gsd/main-session-terminal.tsx index f48b43a31..95216badc 100644 --- a/web/components/gsd/main-session-terminal.tsx +++ b/web/components/gsd/main-session-terminal.tsx @@ -7,6 +7,7 @@ import { cn } from "@/lib/utils" import { validateImageFile } from "@/lib/image-utils" import { buildProjectAbsoluteUrl, buildProjectPath } from "@/lib/project-url" import { authFetch, appendAuthParam } from "@/lib/auth" +import { getXtermOptions, getXtermTheme } from "@/lib/xterm-theme" import "@xterm/xterm/css/xterm.css" type XTerminal = import("@xterm/xterm").Terminal @@ -23,75 +24,6 @@ const MIN_INITIAL_ATTACH_HEIGHT = 120 const MIN_INITIAL_ATTACH_COLS = 20 const MIN_INITIAL_ATTACH_ROWS = 8 -const XTERM_DARK_THEME = { - background: "#0a0a0a", - foreground: "#e4e4e7", - cursor: "#e4e4e7", - cursorAccent: "#0a0a0a", - selectionBackground: "#27272a", - selectionForeground: "#e4e4e7", - black: "#18181b", - red: "#ef4444", - green: "#22c55e", - yellow: "#eab308", - blue: "#3b82f6", - magenta: "#a855f7", - cyan: "#06b6d4", - white: "#e4e4e7", - brightBlack: "#52525b", - brightRed: "#f87171", - brightGreen: "#4ade80", - brightYellow: "#facc15", - brightBlue: "#60a5fa", - brightMagenta: "#c084fc", - brightCyan: "#22d3ee", - brightWhite: "#fafafa", -} as const - -const XTERM_LIGHT_THEME = { - background: "#f5f5f5", - foreground: "#1a1a1a", - cursor: "#1a1a1a", - cursorAccent: "#f5f5f5", - selectionBackground: "#d4d4d8", - selectionForeground: "#1a1a1a", - black: "#1a1a1a", - red: "#dc2626", - green: "#16a34a", - yellow: "#ca8a04", - blue: "#2563eb", - magenta: "#9333ea", - cyan: "#0891b2", - white: "#e4e4e7", - brightBlack: "#71717a", - brightRed: "#ef4444", - brightGreen: "#22c55e", - brightYellow: "#eab308", - brightBlue: "#3b82f6", - brightMagenta: "#a855f7", - brightCyan: "#06b6d4", - brightWhite: "#fafafa", -} as const - -function getXtermTheme(isDark: boolean) { - return isDark ? XTERM_DARK_THEME : XTERM_LIGHT_THEME -} - -function getXtermOptions(isDark: boolean, fontSize?: number) { - return { - cursorBlink: true, - cursorStyle: "bar" as const, - fontSize: fontSize ?? 13, - fontFamily: "'SF Mono', 'Cascadia Code', 'Fira Code', Menlo, Monaco, 'Courier New', monospace", - lineHeight: 1.35, - letterSpacing: 0, - theme: getXtermTheme(isDark), - allowProposedApi: true, - scrollback: 10000, - convertEol: false, - } -} - function getAttachableTerminalSize(container: HTMLDivElement | null, terminal: XTerminal | null): { cols: number; rows: number } | null { if (!container || !terminal) return null diff --git a/web/components/gsd/shell-terminal.tsx b/web/components/gsd/shell-terminal.tsx index b2a3b29fc..22050df45 100644 --- a/web/components/gsd/shell-terminal.tsx +++ b/web/components/gsd/shell-terminal.tsx @@ -8,6 +8,7 @@ import { validateImageFile } from "@/lib/image-utils" import { filterInitialGsdHeader } from "@/lib/initial-gsd-header-filter" import { buildProjectAbsoluteUrl, buildProjectPath } from "@/lib/project-url" import { authFetch, appendAuthParam } from "@/lib/auth" +import { getXtermOptions, getXtermTheme } from "@/lib/xterm-theme" import "@xterm/xterm/css/xterm.css" type XTerminal = import("@xterm/xterm").Terminal @@ -37,78 +38,6 @@ interface ShellTerminalProps { projectCwd?: string } -// ─── xterm themes ───────────────────────────────────────────────────────────── - -const XTERM_DARK_THEME = { - background: "#0a0a0a", - foreground: "#e4e4e7", - cursor: "#e4e4e7", - cursorAccent: "#0a0a0a", - selectionBackground: "#27272a", - selectionForeground: "#e4e4e7", - black: "#18181b", - red: "#ef4444", - green: "#22c55e", - yellow: "#eab308", - blue: "#3b82f6", - magenta: "#a855f7", - cyan: "#06b6d4", - white: "#e4e4e7", - brightBlack: "#52525b", - brightRed: "#f87171", - brightGreen: "#4ade80", - brightYellow: "#facc15", - brightBlue: "#60a5fa", - brightMagenta: "#c084fc", - brightCyan: "#22d3ee", - brightWhite: "#fafafa", -} as const - -const XTERM_LIGHT_THEME = { - background: "#f5f5f5", - foreground: "#1a1a1a", - cursor: "#1a1a1a", - cursorAccent: "#f5f5f5", - selectionBackground: "#d4d4d8", - selectionForeground: "#1a1a1a", - black: "#1a1a1a", - red: "#dc2626", - green: "#16a34a", - yellow: "#a16207", - blue: "#2563eb", - magenta: "#9333ea", - cyan: "#0891b2", - white: "#e4e4e7", - brightBlack: "#71717a", - brightRed: "#ef4444", - brightGreen: "#22c55e", - brightYellow: "#92400e", - brightBlue: "#3b82f6", - brightMagenta: "#a855f7", - brightCyan: "#06b6d4", - brightWhite: "#fafafa", -} as const - -function getXtermTheme(isDark: boolean) { - return isDark ? XTERM_DARK_THEME : XTERM_LIGHT_THEME -} - -function getXtermOptions(isDark: boolean, fontSize?: number) { - return { - cursorBlink: true, - cursorStyle: "bar" as const, - fontSize: fontSize ?? 13, - fontFamily: - "'SF Mono', 'Cascadia Code', 'Fira Code', Menlo, Monaco, 'Courier New', monospace", - lineHeight: 1.35, - letterSpacing: 0, - theme: getXtermTheme(isDark), - allowProposedApi: true, - scrollback: 10000, - convertEol: false, - } -} - function getRenderableTerminalSize(container: HTMLDivElement | null, terminal: XTerminal | null): { cols: number; rows: number } | null { if (!container || !terminal) return null diff --git a/web/lib/xterm-theme.ts b/web/lib/xterm-theme.ts new file mode 100644 index 000000000..afaa1ef39 --- /dev/null +++ b/web/lib/xterm-theme.ts @@ -0,0 +1,70 @@ +const XTERM_DARK_THEME = { + background: "#0a0a0a", + foreground: "#e4e4e7", + cursor: "#e4e4e7", + cursorAccent: "#0a0a0a", + selectionBackground: "#27272a", + selectionForeground: "#e4e4e7", + black: "#18181b", + red: "#ef4444", + green: "#22c55e", + yellow: "#eab308", + blue: "#3b82f6", + magenta: "#a855f7", + cyan: "#06b6d4", + white: "#e4e4e7", + brightBlack: "#52525b", + brightRed: "#f87171", + brightGreen: "#4ade80", + brightYellow: "#facc15", + brightBlue: "#60a5fa", + brightMagenta: "#c084fc", + brightCyan: "#22d3ee", + brightWhite: "#fafafa", +} as const; + +const XTERM_LIGHT_THEME = { + background: "#f5f5f5", + foreground: "#18181b", + cursor: "#18181b", + cursorAccent: "#f5f5f5", + selectionBackground: "#d4d4d8", + selectionForeground: "#18181b", + black: "#18181b", + red: "#b91c1c", + green: "#166534", + yellow: "#854d0e", + blue: "#1d4ed8", + magenta: "#7e22ce", + cyan: "#0f766e", + // Keep ANSI white entries readable on a light terminal surface. + white: "#52525b", + brightBlack: "#71717a", + brightRed: "#dc2626", + brightGreen: "#15803d", + brightYellow: "#713f12", + brightBlue: "#2563eb", + brightMagenta: "#9333ea", + brightCyan: "#0f766e", + brightWhite: "#27272a", +} as const; + +export function getXtermTheme(isDark: boolean) { + return isDark ? XTERM_DARK_THEME : XTERM_LIGHT_THEME; +} + +export function getXtermOptions(isDark: boolean, fontSize?: number) { + return { + cursorBlink: true, + cursorStyle: "bar" as const, + fontSize: fontSize ?? 13, + fontFamily: + "'SF Mono', 'Cascadia Code', 'Fira Code', Menlo, Monaco, 'Courier New', monospace", + lineHeight: 1.35, + letterSpacing: 0, + theme: getXtermTheme(isDark), + allowProposedApi: true, + scrollback: 10000, + convertEol: false, + }; +} From b8d4f037477bbe20b26f86ae21e62db42f295af2 Mon Sep 17 00:00:00 2001 From: Jeremy McSpadden Date: Fri, 27 Mar 2026 15:48:35 -0500 Subject: [PATCH 20/27] fix(parallel): resolve session lock contention and 3 related parallel-mode bugs (#2184) (#2800) Per-milestone lock isolation prevents workers from contending on shared .gsd/auto.lock. Budget ceiling scoped to current session for parallel workers. Symlink sync skip prevents ERR_FS_CP_EINVAL. Planning artifacts copied to worktree so workers can find their roadmap. Closes #2184 --- src/resources/extensions/gsd/auto-worktree.ts | 10 + src/resources/extensions/gsd/auto/phases.ts | 13 +- .../extensions/gsd/crash-recovery.ts | 5 +- .../extensions/gsd/parallel-orchestrator.ts | 7 +- src/resources/extensions/gsd/session-lock.ts | 58 ++++- .../parallel-worker-lock-contention.test.ts | 226 ++++++++++++++++++ 6 files changed, 301 insertions(+), 18 deletions(-) create mode 100644 src/resources/extensions/gsd/tests/parallel-worker-lock-contention.test.ts diff --git a/src/resources/extensions/gsd/auto-worktree.ts b/src/resources/extensions/gsd/auto-worktree.ts index ca39f72b8..1e9e78eb2 100644 --- a/src/resources/extensions/gsd/auto-worktree.ts +++ b/src/resources/extensions/gsd/auto-worktree.ts @@ -196,6 +196,11 @@ export function syncProjectRootToWorktree( const prGsd = join(projectRoot, ".gsd"); const wtGsd = join(worktreePath_, ".gsd"); + // When .gsd is a symlink to the same external directory in both locations, + // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). + // Compare realpaths and skip when they resolve to the same physical path (#2184). + if (isSamePath(prGsd, wtGsd)) return; + // Copy milestone directory from project root to worktree — additive only. // force:false prevents cpSync from overwriting existing worktree files. // Without this, worktree-authoritative files (e.g. VALIDATION.md written @@ -245,6 +250,11 @@ export function syncStateToProjectRoot( const wtGsd = join(worktreePath_, ".gsd"); const prGsd = join(projectRoot, ".gsd"); + // When .gsd is a symlink to the same external directory in both locations, + // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). + // Compare realpaths and skip when they resolve to the same physical path (#2184). + if (isSamePath(wtGsd, prGsd)) return; + // 1. STATE.md — the quick-glance status used by initial deriveState() safeCopy(join(wtGsd, "STATE.md"), join(prGsd, "STATE.md"), { force: true }); diff --git a/src/resources/extensions/gsd/auto/phases.ts b/src/resources/extensions/gsd/auto/phases.ts index 6269bfc0d..c8297ee3c 100644 --- a/src/resources/extensions/gsd/auto/phases.ts +++ b/src/resources/extensions/gsd/auto/phases.ts @@ -719,8 +719,17 @@ export async function runGuards( const budgetCeiling = prefs?.budget_ceiling; if (budgetCeiling !== undefined && budgetCeiling > 0) { const currentLedger = deps.getLedger() as { units: unknown } | null; - const totalCost = currentLedger - ? deps.getProjectTotals(currentLedger.units).cost + // In parallel worker mode, only count cost from the current auto-mode session + // to avoid hitting the ceiling due to historical project-wide spend (#2184). + let costUnits = currentLedger?.units; + if (process.env.GSD_PARALLEL_WORKER && s.autoStartTime && Array.isArray(costUnits)) { + const sessionStartISO = new Date(s.autoStartTime).toISOString(); + costUnits = costUnits.filter( + (u: { startedAt?: string }) => u.startedAt != null && u.startedAt >= sessionStartISO, + ); + } + const totalCost = costUnits + ? deps.getProjectTotals(costUnits).cost : 0; const budgetPct = totalCost / budgetCeiling; const budgetAlertLevel = deps.getBudgetAlertLevel(budgetPct); diff --git a/src/resources/extensions/gsd/crash-recovery.ts b/src/resources/extensions/gsd/crash-recovery.ts index 9d5caa8ef..1b147fead 100644 --- a/src/resources/extensions/gsd/crash-recovery.ts +++ b/src/resources/extensions/gsd/crash-recovery.ts @@ -14,8 +14,7 @@ import { readFileSync, unlinkSync, existsSync } from "node:fs"; import { join } from "node:path"; import { gsdRoot } from "./paths.js"; import { atomicWriteSync } from "./atomic-write.js"; - -const LOCK_FILE = "auto.lock"; +import { effectiveLockFile } from "./session-lock.js"; export interface LockData { pid: number; @@ -28,7 +27,7 @@ export interface LockData { } function lockPath(basePath: string): string { - return join(gsdRoot(basePath), LOCK_FILE); + return join(gsdRoot(basePath), effectiveLockFile()); } /** Write or update the lock file with current auto-mode state. */ diff --git a/src/resources/extensions/gsd/parallel-orchestrator.ts b/src/resources/extensions/gsd/parallel-orchestrator.ts index b59579a26..ff2ce775b 100644 --- a/src/resources/extensions/gsd/parallel-orchestrator.ts +++ b/src/resources/extensions/gsd/parallel-orchestrator.ts @@ -21,7 +21,7 @@ import { join, dirname } from "node:path"; import { fileURLToPath } from "node:url"; import { gsdRoot } from "./paths.js"; import { createWorktree, worktreePath } from "./worktree-manager.js"; -import { autoWorktreeBranch, runWorktreePostCreateHook } from "./auto-worktree.js"; +import { autoWorktreeBranch, runWorktreePostCreateHook, syncGsdStateToWorktree } from "./auto-worktree.js"; import { nativeBranchExists } from "./native-git-bridge.js"; import { readIntegrationBranch } from "./git-service.js"; import { resolveParallelConfig } from "./preferences.js"; @@ -507,6 +507,11 @@ function createMilestoneWorktree(basePath: string, milestoneId: string): string // Run post-create hook if configured runWorktreePostCreateHook(basePath, info.path); + // Copy .gsd/ planning artifacts (milestones, CONTEXT, ROADMAP, etc.) from the + // project root into the worktree. Without this, workers for newly-planned + // milestones can't find their roadmap and exit immediately (#2184 Bug 4). + syncGsdStateToWorktree(basePath, info.path); + return info.path; } diff --git a/src/resources/extensions/gsd/session-lock.ts b/src/resources/extensions/gsd/session-lock.ts index f0f3d2562..1d5a4e7a3 100644 --- a/src/resources/extensions/gsd/session-lock.ts +++ b/src/resources/extensions/gsd/session-lock.ts @@ -83,10 +83,31 @@ let _lockAcquiredAt: number = 0; const LOCK_FILE = "auto.lock"; +/** + * Derive the effective lock file name for the current process. + * In parallel worker mode (GSD_PARALLEL_WORKER + GSD_MILESTONE_LOCK), + * each worker uses a per-milestone lock file (`auto-.lock`) + * to avoid contending on the shared `.gsd/auto.lock` (#2184). + */ +export function effectiveLockFile(): string { + const mid = process.env.GSD_PARALLEL_WORKER ? process.env.GSD_MILESTONE_LOCK : null; + return mid ? `auto-${mid}.lock` : LOCK_FILE; +} + +/** + * Derive the OS-level lock target directory for the current process. + * In parallel worker mode, uses `.gsd/parallel//` instead of + * `.gsd/` so workers don't contend on the same proper-lockfile directory (#2184). + */ +export function effectiveLockTarget(gsdDir: string): string { + const mid = process.env.GSD_PARALLEL_WORKER ? process.env.GSD_MILESTONE_LOCK : null; + return mid ? join(gsdDir, "parallel", mid) : gsdDir; +} + function lockPath(basePath: string): string { // If we have a snapshotted path from acquisition, use it for consistency if (_snapshotLockPath) return _snapshotLockPath; - return join(gsdRoot(basePath), LOCK_FILE); + return join(gsdRoot(basePath), effectiveLockFile()); } // ─── Stray Lock Cleanup ───────────────────────────────────────────────────── @@ -265,14 +286,16 @@ export function acquireSessionLock(basePath: string): SessionLockResult { } const gsdDir = gsdRoot(basePath); + const lockTarget = effectiveLockTarget(gsdDir); try { - // Try to acquire an exclusive OS-level lock on the lock file. - // We lock the directory (gsdRoot) since proper-lockfile works best - // on directories, and the lock file itself may not exist yet. - mkdirSync(gsdDir, { recursive: true }); + // Try to acquire an exclusive OS-level lock on the lock target. + // We lock a directory since proper-lockfile works best on directories, + // and the lock file itself may not exist yet. + // In parallel worker mode, lockTarget is .gsd/parallel// (#2184). + mkdirSync(lockTarget, { recursive: true }); - const release = lockfile.lockSync(gsdDir, { + const release = lockfile.lockSync(lockTarget, { realpath: false, stale: 1_800_000, // 30 minutes — safe for laptop sleep / long event loop stalls update: 10_000, // Update lock mtime every 10s to prove liveness @@ -283,7 +306,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { // Safety net: clean up lock dir on process exit if _releaseFunction // wasn't called (e.g., normal exit after clean completion) (#1245). - ensureExitHandler(gsdDir); + ensureExitHandler(lockTarget); // Write the informational lock data atomicWriteSync(lp, JSON.stringify(lockData, null, 2)); @@ -298,12 +321,12 @@ export function acquireSessionLock(basePath: string): SessionLockResult { // If no lock file or no alive process, try to clean up and re-acquire (#1245) if (!existingData || (existingPid && !isPidAlive(existingPid))) { try { - const lockDir = join(gsdDir + ".lock"); + const lockDir = join(lockTarget + ".lock"); if (existsSync(lockDir)) rmSync(lockDir, { recursive: true, force: true }); if (existsSync(lp)) unlinkSync(lp); // Retry acquisition after cleanup - const release = lockfile.lockSync(gsdDir, { + const release = lockfile.lockSync(lockTarget, { realpath: false, stale: 1_800_000, // 30 minutes — match primary lock settings update: 10_000, @@ -312,7 +335,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { assignLockState(basePath, release, lp); // Safety net — uses centralized handler to avoid double-registration - ensureExitHandler(gsdDir); + ensureExitHandler(lockTarget); atomicWriteSync(lp, JSON.stringify(lockData, null, 2)); return { acquired: true }; @@ -483,13 +506,24 @@ export function releaseSessionLock(basePath: string): void { // Non-fatal } - // Remove the proper-lockfile directory (.gsd.lock/) for the current path + // Remove the proper-lockfile directory for the current lock target. + // In parallel worker mode, this is .gsd/parallel/.lock/ (#2184). + const gsdDir = gsdRoot(basePath); + const lockTarget = effectiveLockTarget(gsdDir); try { - const lockDir = join(gsdRoot(basePath) + ".lock"); + const lockDir = join(lockTarget + ".lock"); if (existsSync(lockDir)) rmSync(lockDir, { recursive: true, force: true }); } catch { // Non-fatal } + // Also clean the per-milestone parallel directory itself if it exists + if (lockTarget !== gsdDir) { + try { + if (existsSync(lockTarget)) rmSync(lockTarget, { recursive: true, force: true }); + } catch { + // Non-fatal + } + } // Clean ALL registered lock paths (#1578) — lock files accumulate across // main project .gsd/, worktree .gsd/, and projects registry paths. diff --git a/src/resources/extensions/gsd/tests/parallel-worker-lock-contention.test.ts b/src/resources/extensions/gsd/tests/parallel-worker-lock-contention.test.ts new file mode 100644 index 000000000..0f27fa0ac --- /dev/null +++ b/src/resources/extensions/gsd/tests/parallel-worker-lock-contention.test.ts @@ -0,0 +1,226 @@ +/** + * parallel-worker-lock-contention.test.ts — Regression tests for #2184. + * + * Covers all four bugs from the parallel worker contention issue: + * Bug 1: Session lock contention — per-milestone lock isolation + * Bug 2: Budget ceiling scoped to current session for parallel workers + * Bug 3: syncProjectRootToWorktree skips when source === destination (symlinks) + * Bug 4: createMilestoneWorktree copies planning artifacts + * + * Copyright (c) 2026 Jeremy McSpadden + */ + +import { + mkdtempSync, + mkdirSync, + writeFileSync, + rmSync, + existsSync, + symlinkSync, + readFileSync, +} from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; + +import { + acquireSessionLock, + releaseSessionLock, + effectiveLockFile, + effectiveLockTarget, +} from "../session-lock.ts"; +import { gsdRoot } from "../paths.ts"; +import { + syncProjectRootToWorktree, + syncStateToProjectRoot, +} from "../auto-worktree.ts"; +import { writeLock, readCrashLock, clearLock } from "../crash-recovery.ts"; +import { describe, test, beforeEach, afterEach } from "node:test"; +import assert from "node:assert/strict"; + +// ─── Bug 1: Per-milestone lock isolation ────────────────────────────────────── + +describe("parallel-worker-lock-contention (#2184)", () => { + // Save and restore env vars between tests + const savedEnv: Record = {}; + + beforeEach(() => { + savedEnv.GSD_PARALLEL_WORKER = process.env.GSD_PARALLEL_WORKER; + savedEnv.GSD_MILESTONE_LOCK = process.env.GSD_MILESTONE_LOCK; + }); + + afterEach(() => { + if (savedEnv.GSD_PARALLEL_WORKER === undefined) { + delete process.env.GSD_PARALLEL_WORKER; + } else { + process.env.GSD_PARALLEL_WORKER = savedEnv.GSD_PARALLEL_WORKER; + } + if (savedEnv.GSD_MILESTONE_LOCK === undefined) { + delete process.env.GSD_MILESTONE_LOCK; + } else { + process.env.GSD_MILESTONE_LOCK = savedEnv.GSD_MILESTONE_LOCK; + } + }); + + // ─── Bug 1a: effectiveLockFile returns per-milestone name ──────────────── + test("Bug 1a: effectiveLockFile returns auto.lock without parallel env", () => { + delete process.env.GSD_PARALLEL_WORKER; + delete process.env.GSD_MILESTONE_LOCK; + assert.equal(effectiveLockFile(), "auto.lock"); + }); + + test("Bug 1a: effectiveLockFile returns auto-.lock in parallel mode", () => { + process.env.GSD_PARALLEL_WORKER = "1"; + process.env.GSD_MILESTONE_LOCK = "M003"; + assert.equal(effectiveLockFile(), "auto-M003.lock"); + }); + + // ─── Bug 1b: effectiveLockTarget returns per-milestone directory ───────── + test("Bug 1b: effectiveLockTarget returns gsdDir without parallel env", () => { + delete process.env.GSD_PARALLEL_WORKER; + const gsdDir = "/tmp/test/.gsd"; + assert.equal(effectiveLockTarget(gsdDir), gsdDir); + }); + + test("Bug 1b: effectiveLockTarget returns parallel/ in parallel mode", () => { + process.env.GSD_PARALLEL_WORKER = "1"; + process.env.GSD_MILESTONE_LOCK = "M003"; + const gsdDir = "/tmp/test/.gsd"; + assert.equal(effectiveLockTarget(gsdDir), join(gsdDir, "parallel", "M003")); + }); + + // ─── Bug 1c: Two parallel workers acquire independent locks ────────────── + test("Bug 1c: parallel workers use per-milestone lock files, not shared auto.lock", () => { + const base = mkdtempSync(join(tmpdir(), "gsd-parallel-lock-")); + mkdirSync(join(base, ".gsd"), { recursive: true }); + + try { + // Simulate worker for M001 + process.env.GSD_PARALLEL_WORKER = "1"; + process.env.GSD_MILESTONE_LOCK = "M001"; + + const r1 = acquireSessionLock(base); + assert.ok(r1.acquired, "M001 worker acquires lock"); + + // Verify the lock file is per-milestone + const gsdDir = gsdRoot(base); + const m001LockFile = join(gsdDir, "auto-M001.lock"); + assert.ok(existsSync(m001LockFile), "auto-M001.lock exists"); + + // The shared auto.lock should NOT exist + const sharedLockFile = join(gsdDir, "auto.lock"); + assert.ok(!existsSync(sharedLockFile), "shared auto.lock does NOT exist"); + + // The per-milestone lock target directory should exist + const m001LockTarget = join(gsdDir, "parallel", "M001"); + assert.ok(existsSync(m001LockTarget), "parallel/M001 directory exists"); + + releaseSessionLock(base); + + // After release, per-milestone lock file should be cleaned + assert.ok(!existsSync(m001LockFile), "auto-M001.lock cleaned after release"); + } finally { + delete process.env.GSD_PARALLEL_WORKER; + delete process.env.GSD_MILESTONE_LOCK; + rmSync(base, { recursive: true, force: true }); + } + }); + + // ─── Bug 1d: crash-recovery uses per-milestone lock file ───────────────── + test("Bug 1d: crash-recovery writeLock/readCrashLock uses per-milestone lock in parallel mode", () => { + const base = mkdtempSync(join(tmpdir(), "gsd-parallel-crash-")); + mkdirSync(join(base, ".gsd"), { recursive: true }); + + try { + process.env.GSD_PARALLEL_WORKER = "1"; + process.env.GSD_MILESTONE_LOCK = "M002"; + + writeLock(base, "execute-task", "M002/S01/T01"); + + const gsdDir = gsdRoot(base); + const lockFile = join(gsdDir, "auto-M002.lock"); + assert.ok(existsSync(lockFile), "crash-recovery writes auto-M002.lock"); + + const data = readCrashLock(base); + assert.ok(data !== null, "readCrashLock reads per-milestone lock"); + assert.equal(data!.unitId, "M002/S01/T01"); + + clearLock(base); + assert.ok(!existsSync(lockFile), "clearLock removes per-milestone lock"); + } finally { + delete process.env.GSD_PARALLEL_WORKER; + delete process.env.GSD_MILESTONE_LOCK; + rmSync(base, { recursive: true, force: true }); + } + }); + + // ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ─────────── + test("Bug 3: syncProjectRootToWorktree skips when .gsd resolves to same path (symlink)", () => { + const base = mkdtempSync(join(tmpdir(), "gsd-symlink-sync-")); + const externalGsd = join(base, "external-gsd"); + const projectRoot = join(base, "project"); + const worktreePath = join(base, "worktree"); + + mkdirSync(externalGsd, { recursive: true }); + mkdirSync(projectRoot, { recursive: true }); + mkdirSync(worktreePath, { recursive: true }); + + // Create the external state directory with a milestone + mkdirSync(join(externalGsd, "milestones", "M001"), { recursive: true }); + writeFileSync( + join(externalGsd, "milestones", "M001", "M001-ROADMAP.md"), + "# Roadmap", + ); + + // Symlink both project and worktree .gsd to the same external directory + symlinkSync(externalGsd, join(projectRoot, ".gsd")); + symlinkSync(externalGsd, join(worktreePath, ".gsd")); + + try { + // This should NOT throw ERR_FS_CP_EINVAL — it should skip silently + let threw = false; + try { + syncProjectRootToWorktree(projectRoot, worktreePath, "M001"); + } catch { + threw = true; + } + assert.ok(!threw, "syncProjectRootToWorktree does not throw on same-path symlink"); + + // Same for reverse direction + threw = false; + try { + syncStateToProjectRoot(worktreePath, projectRoot, "M001"); + } catch { + threw = true; + } + assert.ok(!threw, "syncStateToProjectRoot does not throw on same-path symlink"); + } finally { + rmSync(base, { recursive: true, force: true }); + } + }); + + // ─── Bug 3b: sync still works when paths are different ─────────────────── + test("Bug 3b: syncProjectRootToWorktree copies when .gsd paths are different", () => { + const base = mkdtempSync(join(tmpdir(), "gsd-diff-sync-")); + const projectRoot = join(base, "project"); + const worktreePath = join(base, "worktree"); + + mkdirSync(join(projectRoot, ".gsd", "milestones", "M001"), { recursive: true }); + mkdirSync(join(worktreePath, ".gsd", "milestones"), { recursive: true }); + + writeFileSync( + join(projectRoot, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + "# Roadmap content", + ); + + try { + syncProjectRootToWorktree(projectRoot, worktreePath, "M001"); + + // The roadmap should have been copied + const copied = join(worktreePath, ".gsd", "milestones", "M001", "M001-ROADMAP.md"); + assert.ok(existsSync(copied), "milestone roadmap copied to worktree"); + assert.equal(readFileSync(copied, "utf-8"), "# Roadmap content"); + } finally { + rmSync(base, { recursive: true, force: true }); + } + }); +}); From 447a57ae0f05c1c540beab69290a386ca3aeb376 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 21:50:40 +0100 Subject: [PATCH 21/27] fix(gsd): resume auto-mode after transient provider pause (#2822) Transient provider recovery previously sent a hidden continue message after the backoff timer elapsed, but the auto loop had already exited. Resume the paused session through startAuto() instead so the timer actually restarts auto-mode, and cover the resumed, duplicate-resume, and missing-base-path cases with regression tests. Closes #2813 --- .../gsd/bootstrap/agent-end-recovery.ts | 9 +- .../gsd/bootstrap/provider-error-resume.ts | 53 ++++++++++ .../gsd/tests/provider-errors.test.ts | 98 +++++++++++++++++++ 3 files changed, 156 insertions(+), 4 deletions(-) create mode 100644 src/resources/extensions/gsd/bootstrap/provider-error-resume.ts diff --git a/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts b/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts index 89de63a58..22dd56075 100644 --- a/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts +++ b/src/resources/extensions/gsd/bootstrap/agent-end-recovery.ts @@ -7,6 +7,7 @@ import { pauseAutoForProviderError } from "../provider-error-pause.js"; import { isSessionSwitchInFlight, resolveAgentEnd } from "../auto-loop.js"; import { resolveModelId } from "../auto-model-selection.js"; import { clearDiscussionFlowState } from "./write-gate.js"; +import { resumeAutoAfterProviderDelay } from "./provider-error-resume.js"; import { classifyError, createRetryState, @@ -44,10 +45,10 @@ async function pauseTransientWithBackoff( retryAfterMs, resume: allowAutoResume ? () => { - pi.sendMessage( - { customType: "gsd-auto-timeout-recovery", content: "Continue execution — provider error recovery delay elapsed.", display: false }, - { triggerTurn: true }, - ); + void resumeAutoAfterProviderDelay(pi, ctx).catch((err) => { + const message = err instanceof Error ? err.message : String(err); + ctx.ui.notify(`Provider error recovery delay elapsed, but auto-mode failed to resume: ${message}`, "error"); + }); } : undefined, }); diff --git a/src/resources/extensions/gsd/bootstrap/provider-error-resume.ts b/src/resources/extensions/gsd/bootstrap/provider-error-resume.ts new file mode 100644 index 000000000..35efdcbf5 --- /dev/null +++ b/src/resources/extensions/gsd/bootstrap/provider-error-resume.ts @@ -0,0 +1,53 @@ +import type { + ExtensionAPI, + ExtensionCommandContext, + ExtensionContext, +} from "@gsd/pi-coding-agent"; + +import { getAutoDashboardData, startAuto, type AutoDashboardData } from "../auto.js"; + +type AutoResumeSnapshot = Pick; + +export interface ProviderErrorResumeDeps { + getSnapshot(): AutoResumeSnapshot; + startAuto( + ctx: ExtensionCommandContext, + pi: ExtensionAPI, + base: string, + verboseMode: boolean, + options?: { step?: boolean }, + ): Promise; +} + +const defaultDeps: ProviderErrorResumeDeps = { + getSnapshot: () => getAutoDashboardData(), + startAuto, +}; + +export async function resumeAutoAfterProviderDelay( + pi: ExtensionAPI, + ctx: ExtensionContext, + deps: ProviderErrorResumeDeps = defaultDeps, +): Promise<"resumed" | "already-active" | "not-paused" | "missing-base"> { + const snapshot = deps.getSnapshot(); + + if (snapshot.active) return "already-active"; + if (!snapshot.paused) return "not-paused"; + + if (!snapshot.basePath) { + ctx.ui.notify( + "Provider error recovery delay elapsed, but no paused auto-mode base path was available. Leaving auto-mode paused.", + "warning", + ); + return "missing-base"; + } + + await deps.startAuto( + ctx as ExtensionCommandContext, + pi, + snapshot.basePath, + false, + { step: snapshot.stepMode }, + ); + return "resumed"; +} diff --git a/src/resources/extensions/gsd/tests/provider-errors.test.ts b/src/resources/extensions/gsd/tests/provider-errors.test.ts index dfe07867c..832cea206 100644 --- a/src/resources/extensions/gsd/tests/provider-errors.test.ts +++ b/src/resources/extensions/gsd/tests/provider-errors.test.ts @@ -12,6 +12,7 @@ import { join, dirname } from "node:path"; import { fileURLToPath } from "node:url"; import { classifyError, isTransient, isTransientNetworkError } from "../error-classifier.ts"; import { pauseAutoForProviderError } from "../provider-error-pause.ts"; +import { resumeAutoAfterProviderDelay } from "../bootstrap/provider-error-resume.ts"; import { getNextFallbackModel } from "../preferences.ts"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -268,6 +269,90 @@ test("pauseAutoForProviderError falls back to indefinite pause when not rate lim ]); }); +// ── resumeAutoAfterProviderDelay ──────────────────────────────────────────── + +test("resumeAutoAfterProviderDelay restarts paused auto-mode from the recorded base path", async () => { + const startCalls: Array<{ base: string; verboseMode: boolean; step?: boolean }> = []; + const result = await resumeAutoAfterProviderDelay( + {} as any, + { ui: { notify() {} } } as any, + { + getSnapshot: () => ({ + active: false, + paused: true, + stepMode: true, + basePath: "/tmp/project", + }), + startAuto: async (_ctx, _pi, base, verboseMode, options) => { + startCalls.push({ base, verboseMode, step: options?.step }); + }, + }, + ); + + assert.equal(result, "resumed"); + assert.deepEqual(startCalls, [ + { base: "/tmp/project", verboseMode: false, step: true }, + ]); +}); + +test("resumeAutoAfterProviderDelay does not double-start when auto-mode is already active", async () => { + let startCalls = 0; + const result = await resumeAutoAfterProviderDelay( + {} as any, + { ui: { notify() {} } } as any, + { + getSnapshot: () => ({ + active: true, + paused: false, + stepMode: false, + basePath: "/tmp/project", + }), + startAuto: async () => { + startCalls += 1; + }, + }, + ); + + assert.equal(result, "already-active"); + assert.equal(startCalls, 0); +}); + +test("resumeAutoAfterProviderDelay leaves auto paused when no base path is available", async () => { + const notifications: Array<{ message: string; level: string }> = []; + let startCalls = 0; + + const result = await resumeAutoAfterProviderDelay( + {} as any, + { + ui: { + notify(message: string, level?: string) { + notifications.push({ message, level: level ?? "info" }); + }, + }, + } as any, + { + getSnapshot: () => ({ + active: false, + paused: true, + stepMode: false, + basePath: "", + }), + startAuto: async () => { + startCalls += 1; + }, + }, + ); + + assert.equal(result, "missing-base"); + assert.equal(startCalls, 0); + assert.deepEqual(notifications, [ + { + message: "Provider error recovery delay elapsed, but no paused auto-mode base path was available. Leaving auto-mode paused.", + level: "warning", + }, + ]); +}); + // ── Escalating backoff for transient errors (#1166) ───────────────────────── test("agent-end-recovery.ts tracks consecutive transient errors for escalating backoff", () => { @@ -303,6 +388,19 @@ test("agent-end-recovery.ts applies escalating delay for repeated transient erro ); }); +test("agent-end-recovery.ts resumes transient provider pauses through startAuto instead of a hidden prompt", () => { + const src = readFileSync(join(__dirname, "..", "bootstrap", "agent-end-recovery.ts"), "utf-8"); + + assert.ok( + src.includes("resumeAutoAfterProviderDelay"), + "agent-end-recovery.ts must resume paused auto-mode through resumeAutoAfterProviderDelay (#2813)", + ); + assert.ok( + !src.includes('Continue execution — provider error recovery delay elapsed.'), + "transient provider resume must not rely on a hidden continue prompt (#2813)", + ); +}); + // ── Codex error extraction (#1166) ────────────────────────────────────────── test("openai-codex-responses.ts extracts nested error fields", () => { From 48279ae5a481832b9b6644303e67a8b8ab2865b1 Mon Sep 17 00:00:00 2001 From: Jordan Gaytan <48812427+astrogopher@users.noreply.github.com> Date: Fri, 27 Mar 2026 15:51:18 -0500 Subject: [PATCH 22/27] =?UTF-8?q?feat(parallel):=20/gsd=20parallel=20watch?= =?UTF-8?q?=20=E2=80=94=20native=20TUI=20overlay=20for=20worker=20monitori?= =?UTF-8?q?ng=20(#2806)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(parallel): add /gsd parallel watch command and Ctrl+Alt+P overlay Integrates the parallel worker monitor as a native pi-tui overlay that renders inside the GSD session, matching the existing dashboard overlay pattern (GSDDashboardOverlay / Ctrl+Alt+G). Three integration points: - /gsd parallel watch — opens the live monitor overlay - Ctrl+Alt+P — keyboard shortcut (same pattern as Ctrl+Alt+G for status) - Tab completion: 'watch' added to parallel subcommand completions The overlay (ParallelMonitorOverlay) provides: - Per-worker panels: health dot, phase label, slice/task progress bars - Event feed: recent task completions from worktree SQLite DBs - Cost tracking: status.json with NDJSON fallback for respawned workers - Heartbeat: orchestrator timestamp or file mtime proxy - Scrollable: arrow keys / j/k, ESC/q to close - 5s auto-refresh via setInterval Reuses all data-reading logic from the standalone scripts/parallel-monitor.mjs (merged in #2799) but renders through the pi-tui theme system instead of raw ANSI codes. Follows the same overlay registration pattern as the GSD dashboard (register-shortcuts.ts + handlers/core.ts). * fix(parallel): align overlay with Component interface, add tests - Add invalidate() method required by Component interface - Fix handleInput signature: void return, not boolean - Fix Key usage: Key.escape/Key.down/Key.up (constants, not functions) - Fix render signature: single width arg, not (width, height) - Add resize listener cleanup in dispose() - Add parallel-monitor-overlay.test.ts (satisfies require-tests CI gate) * fix(parallel): use spawnSync for cross-platform path safety Replace execSync template literals with spawnSync array args for sqlite3 calls. Paths with spaces or special chars broke on Windows because execSync interpolates into a shell string. spawnSync passes args directly to the process, bypassing shell interpretation. Fixes cross-platform-filesystem-safety.test.ts assertion. --- .../gsd/bootstrap/register-shortcuts.ts | 24 + .../extensions/gsd/commands/catalog.ts | 3 +- .../gsd/commands/handlers/parallel.ts | 20 +- .../gsd/parallel-monitor-overlay.ts | 497 ++++++++++++++++++ .../tests/parallel-monitor-overlay.test.ts | 60 +++ 5 files changed, 602 insertions(+), 2 deletions(-) create mode 100644 src/resources/extensions/gsd/parallel-monitor-overlay.ts create mode 100644 src/resources/extensions/gsd/tests/parallel-monitor-overlay.test.ts diff --git a/src/resources/extensions/gsd/bootstrap/register-shortcuts.ts b/src/resources/extensions/gsd/bootstrap/register-shortcuts.ts index ea94bc9dd..03156b52a 100644 --- a/src/resources/extensions/gsd/bootstrap/register-shortcuts.ts +++ b/src/resources/extensions/gsd/bootstrap/register-shortcuts.ts @@ -5,6 +5,7 @@ import type { ExtensionAPI } from "@gsd/pi-coding-agent"; import { Key } from "@gsd/pi-tui"; import { GSDDashboardOverlay } from "../dashboard-overlay.js"; +import { ParallelMonitorOverlay } from "../parallel-monitor-overlay.js"; import { shortcutDesc } from "../../shared/mod.js"; export function registerShortcuts(pi: ExtensionAPI): void { @@ -29,4 +30,27 @@ export function registerShortcuts(pi: ExtensionAPI): void { ); }, }); + + pi.registerShortcut(Key.ctrlAlt("p"), { + description: shortcutDesc("Open parallel worker monitor", "/gsd parallel watch"), + handler: async (ctx) => { + const parallelDir = join(process.cwd(), ".gsd", "parallel"); + if (!existsSync(parallelDir)) { + ctx.ui.notify("No parallel workers found. Run /gsd parallel start first.", "info"); + return; + } + await ctx.ui.custom( + (tui, theme, _kb, done) => new ParallelMonitorOverlay(tui, theme, () => done()), + { + overlay: true, + overlayOptions: { + width: "90%", + minWidth: 80, + maxHeight: "92%", + anchor: "center", + }, + }, + ); + }, + }); } diff --git a/src/resources/extensions/gsd/commands/catalog.ts b/src/resources/extensions/gsd/commands/catalog.ts index 8045c85be..7d688d41c 100644 --- a/src/resources/extensions/gsd/commands/catalog.ts +++ b/src/resources/extensions/gsd/commands/catalog.ts @@ -59,7 +59,7 @@ export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [ { cmd: "inspect", desc: "Show SQLite DB diagnostics" }, { cmd: "knowledge", desc: "Add persistent project knowledge (rule, pattern, or lesson)" }, { cmd: "new-milestone", desc: "Create a milestone from a specification document (headless)" }, - { cmd: "parallel", desc: "Parallel milestone orchestration (start, status, stop, merge)" }, + { cmd: "parallel", desc: "Parallel milestone orchestration (start, status, stop, merge, watch)" }, { cmd: "cmux", desc: "Manage cmux integration (status, sidebar, notifications, splits)" }, { cmd: "park", desc: "Park a milestone — skip without deleting" }, { cmd: "unpark", desc: "Reactivate a parked milestone" }, @@ -100,6 +100,7 @@ const NESTED_COMPLETIONS: CompletionMap = { { cmd: "pause", desc: "Pause a specific worker" }, { cmd: "resume", desc: "Resume a paused worker" }, { cmd: "merge", desc: "Merge completed milestone branches" }, + { cmd: "watch", desc: "Live TUI dashboard monitoring all workers" }, ], setup: [ { cmd: "llm", desc: "Configure LLM provider settings" }, diff --git a/src/resources/extensions/gsd/commands/handlers/parallel.ts b/src/resources/extensions/gsd/commands/handlers/parallel.ts index 6b2d630ff..bc8eea7da 100644 --- a/src/resources/extensions/gsd/commands/handlers/parallel.ts +++ b/src/resources/extensions/gsd/commands/handlers/parallel.ts @@ -111,7 +111,25 @@ export async function handleParallelCommand(trimmed: string, _ctx: ExtensionComm return true; } - emitParallelMessage(pi, `Unknown parallel subcommand "${subcommand}". Usage: /gsd parallel [start|status|stop|pause|resume|merge]`); + if (subcommand === "watch") { + const root = projectRoot(); + const { ParallelMonitorOverlay } = await import("../../parallel-monitor-overlay.js"); + await _ctx.ui.custom( + (tui, theme, _kb, done) => new ParallelMonitorOverlay(tui, theme, () => done(), root), + { + overlay: true, + overlayOptions: { + width: "90%", + minWidth: 80, + maxHeight: "92%", + anchor: "center", + }, + }, + ); + return true; + } + + emitParallelMessage(pi, `Unknown parallel subcommand "${subcommand}". Usage: /gsd parallel [start|status|stop|pause|resume|merge|watch]`); return true; } diff --git a/src/resources/extensions/gsd/parallel-monitor-overlay.ts b/src/resources/extensions/gsd/parallel-monitor-overlay.ts new file mode 100644 index 000000000..4b671f973 --- /dev/null +++ b/src/resources/extensions/gsd/parallel-monitor-overlay.ts @@ -0,0 +1,497 @@ +/** + * GSD Parallel Monitor Overlay + * + * Full-screen TUI overlay showing real-time parallel worker progress. + * Opened via `/gsd parallel watch` or Ctrl+Alt+P. + * Reads the same data sources as `scripts/parallel-monitor.mjs` but + * renders as a native pi-tui overlay with theme integration. + */ + +import { existsSync, statSync, readFileSync, openSync, readSync, closeSync, readdirSync } from "node:fs"; +import { join } from "node:path"; +import { spawnSync } from "node:child_process"; + +import type { Theme } from "@gsd/pi-coding-agent"; +import { truncateToWidth, visibleWidth, matchesKey, Key } from "@gsd/pi-tui"; + +import { formatDuration, STATUS_GLYPH, STATUS_COLOR } from "../shared/mod.js"; + +// ─── Types ──────────────────────────────────────────────────────────────── + +interface StatusJson { + milestoneId: string; + pid: number; + state: string; + cost: number; + lastHeartbeat: number; + startedAt: number; + worktreePath: string; +} + +interface AutoLock { + pid: number; + startedAt: string; + unitType: string; + unitId: string; + unitStartedAt: string; +} + +interface SliceProgress { + id: string; + status: string; + total: number; + done: number; +} + +interface WorkerView { + mid: string; + pid: number; + alive: boolean; + state: string; + cost: number; + heartbeatAge: number; + currentUnit: string | null; + unitType: string | null; + unitElapsed: number; + elapsed: number; + totalTasks: number; + doneTasks: number; + totalSlices: number; + doneSlices: number; + slices: SliceProgress[]; + errors: string[]; +} + +// ─── Data Helpers ───────────────────────────────────────────────────────── + +function readJsonSafe(filePath: string): T | null { + try { + return JSON.parse(readFileSync(filePath, "utf-8")) as T; + } catch { + return null; + } +} + +function isPidAlive(pid: number): boolean { + try { + process.kill(pid, 0); + return true; + } catch { + return false; + } +} + +function tailRead(filePath: string, maxBytes: number): string { + try { + const stat = statSync(filePath); + const readSize = Math.min(stat.size, maxBytes); + const fd = openSync(filePath, "r"); + const buf = Buffer.alloc(readSize); + readSync(fd, buf, 0, readSize, Math.max(0, stat.size - readSize)); + closeSync(fd); + return buf.toString("utf-8"); + } catch { + return ""; + } +} + +function discoverWorkers(basePath: string): string[] { + const parallelDir = join(basePath, ".gsd", "parallel"); + const worktreeDir = join(basePath, ".gsd", "worktrees"); + const mids = new Set(); + + if (existsSync(parallelDir)) { + try { + for (const f of readdirSync(parallelDir)) { + if (f.endsWith(".status.json")) mids.add(f.replace(".status.json", "")); + const m = f.match(/^(M\d+)\.(stderr|stdout)\.log$/); + if (m) mids.add(m[1]); + } + } catch { /* skip */ } + } + + if (existsSync(worktreeDir)) { + try { + for (const d of readdirSync(worktreeDir)) { + if (d.startsWith("M") && existsSync(join(worktreeDir, d, ".gsd", "auto.lock"))) { + mids.add(d); + } + } + } catch { /* skip */ } + } + + return [...mids].sort(); +} + +function querySliceProgress(basePath: string, mid: string): SliceProgress[] { + const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "gsd.db"); + if (!existsSync(dbPath)) return []; + + try { + const sql = `SELECT s.id, s.status, COUNT(t.id), SUM(CASE WHEN t.status='complete' THEN 1 ELSE 0 END) FROM slices s LEFT JOIN tasks t ON s.milestone_id=t.milestone_id AND s.id=t.slice_id WHERE s.milestone_id='${mid}' GROUP BY s.id ORDER BY s.id`; + const result = spawnSync("sqlite3", [dbPath, sql], { timeout: 3000, encoding: "utf-8" }); + const out = (result.stdout || "").trim(); + if (!out || result.status !== 0) return []; + return out.split("\n").map((line) => { + const [id, status, total, done] = line.split("|"); + return { id, status, total: parseInt(total, 10), done: parseInt(done || "0", 10) }; + }); + } catch { + return []; + } +} + +function extractCostFromNdjson(basePath: string, mid: string): number { + const stdoutPath = join(basePath, ".gsd", "parallel", `${mid}.stdout.log`); + if (!existsSync(stdoutPath)) return 0; + try { + const content = readFileSync(stdoutPath, "utf-8"); + let total = 0; + for (const line of content.split("\n")) { + if (!line.includes("message_end")) continue; + try { + const obj = JSON.parse(line); + if (obj.type === "message_end") { + const cost = obj.message?.usage?.cost?.total; + if (typeof cost === "number") total += cost; + } + } catch { /* skip */ } + } + return total; + } catch { + return 0; + } +} + +function queryRecentCompletions(basePath: string, mid: string): string[] { + const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "gsd.db"); + if (!existsSync(dbPath)) return []; + try { + const sql = `SELECT id, slice_id, one_liner FROM tasks WHERE milestone_id='${mid}' AND status='complete' AND completed_at IS NOT NULL ORDER BY completed_at DESC LIMIT 5`; + const result = spawnSync("sqlite3", [dbPath, sql], { timeout: 3000, encoding: "utf-8" }); + const out = (result.stdout || "").trim(); + if (!out || result.status !== 0) return []; + return out.split("\n").map((line) => { + const [taskId, sliceId, oneLiner] = line.split("|"); + return `✓ ${mid}/${sliceId}/${taskId}${oneLiner ? ": " + oneLiner : ""}`; + }); + } catch { + return []; + } +} + +function collectWorkerData(basePath: string): WorkerView[] { + const mids = discoverWorkers(basePath); + const parallelDir = join(basePath, ".gsd", "parallel"); + const workers: WorkerView[] = []; + + for (const mid of mids) { + const status = readJsonSafe(join(parallelDir, `${mid}.status.json`)); + const lock = readJsonSafe(join(basePath, ".gsd", "worktrees", mid, ".gsd", "auto.lock")); + const slices = querySliceProgress(basePath, mid); + + const pid = lock?.pid || status?.pid || 0; + const alive = pid ? isPidAlive(pid) : false; + + // Heartbeat: prefer status.json if PID matches, else use file mtime + let heartbeatAge = Infinity; + const statusPidMatches = status?.pid === pid && status?.lastHeartbeat; + if (statusPidMatches) { + heartbeatAge = Date.now() - status!.lastHeartbeat; + } else { + const mtimes: number[] = []; + const stdoutLog = join(parallelDir, `${mid}.stdout.log`); + const stderrLog = join(parallelDir, `${mid}.stderr.log`); + if (existsSync(stdoutLog)) mtimes.push(statSync(stdoutLog).mtimeMs); + if (existsSync(stderrLog)) mtimes.push(statSync(stderrLog).mtimeMs); + if (lock?.unitStartedAt) mtimes.push(new Date(lock.unitStartedAt).getTime()); + if (mtimes.length > 0) heartbeatAge = Date.now() - Math.max(...mtimes); + } + + let cost = status?.cost || 0; + if (cost === 0) cost = extractCostFromNdjson(basePath, mid); + + const totalTasks = slices.reduce((sum, s) => sum + s.total, 0); + const doneTasks = slices.reduce((sum, s) => sum + s.done, 0); + const doneSlices = slices.filter((s) => s.status === "complete").length; + + const elapsed = status?.startedAt + ? Date.now() - status.startedAt + : lock?.startedAt + ? Date.now() - new Date(lock.startedAt).getTime() + : 0; + + // Errors from stderr (last 4KB, only new content) + const errors: string[] = []; + const stderrLog = join(parallelDir, `${mid}.stderr.log`); + if (existsSync(stderrLog)) { + const content = tailRead(stderrLog, 4096); + for (const line of content.trim().split("\n").slice(-5)) { + if (line.includes("error") || line.includes("Error") || line.includes("exited")) { + errors.push(line.trim()); + } + } + } + + workers.push({ + mid, + pid, + alive, + state: alive ? "running" : (status?.state || "dead"), + cost, + heartbeatAge, + currentUnit: lock?.unitId || null, + unitType: lock?.unitType || null, + unitElapsed: lock?.unitStartedAt ? Date.now() - new Date(lock.unitStartedAt).getTime() : 0, + elapsed, + totalTasks, + doneTasks, + totalSlices: slices.length, + doneSlices, + slices, + errors, + }); + } + + return workers; +} + +// ─── Rendering Helpers ──────────────────────────────────────────────────── + +function unitTypeLabel(unitType: string | null): string { + const labels: Record = { + "execute-task": "EXEC", + "research-slice": "RSRCH", + "plan-slice": "PLAN", + "complete-slice": "DONE", + "complete-task": "DONE", + "reassess": "ASSESS", + "validate": "VALID", + "reassess-roadmap": "ASSESS", + }; + return labels[unitType || ""] || (unitType || "---").toUpperCase().slice(0, 5); +} + +function progressBar(done: number, total: number, width: number): string { + if (total === 0) return "░".repeat(width); + const filled = Math.round((done / total) * width); + return "█".repeat(filled) + "░".repeat(width - filled); +} + +function healthGlyph(alive: boolean, heartbeatAge: number): string { + if (!alive) return "○"; + return "●"; +} + +// ─── Overlay Class ──────────────────────────────────────────────────────── + +export class ParallelMonitorOverlay { + private tui: { requestRender: () => void }; + private theme: Theme; + private onClose: () => void; + private basePath: string; + private refreshTimer: ReturnType; + private workers: WorkerView[] = []; + private events: string[] = []; + private cachedLines?: string[]; + private scrollOffset = 0; + private disposed = false; + private resizeHandler: (() => void) | null = null; + + constructor( + tui: { requestRender: () => void }, + theme: Theme, + onClose: () => void, + basePath?: string, + ) { + this.tui = tui; + this.theme = theme; + this.onClose = onClose; + this.basePath = basePath || process.cwd(); + + this.resizeHandler = () => { + if (this.disposed) return; + this.invalidate(); + this.tui.requestRender(); + }; + process.stdout.on("resize", this.resizeHandler); + + this.refresh(); + this.refreshTimer = setInterval(() => this.refresh(), 5000); + } + + private refresh(): void { + if (this.disposed) return; + this.workers = collectWorkerData(this.basePath); + + // Collect completion events + for (const wk of this.workers) { + const completions = queryRecentCompletions(this.basePath, wk.mid); + for (const evt of completions) { + if (!this.events.includes(evt)) this.events.push(evt); + } + } + this.events = this.events.slice(-10); + + this.cachedLines = undefined; + this.tui.requestRender(); + } + + dispose(): void { + this.disposed = true; + clearInterval(this.refreshTimer); + if (this.resizeHandler) { + process.stdout.removeListener("resize", this.resizeHandler); + this.resizeHandler = null; + } + } + + handleInput(data: string): void { + if (matchesKey(data, Key.escape) || data === "q") { + this.dispose(); + this.onClose(); + return; + } + if (matchesKey(data, Key.down) || data === "j") { + this.scrollOffset++; + this.invalidate(); + this.tui.requestRender(); + return; + } + if (matchesKey(data, Key.up) || data === "k") { + this.scrollOffset = Math.max(0, this.scrollOffset - 1); + this.invalidate(); + this.tui.requestRender(); + return; + } + } + + invalidate(): void { + this.cachedLines = undefined; + } + + render(width: number): string[] { + if (this.cachedLines) return this.cachedLines; + + const t = this.theme; + const lines: string[] = []; + const w = Math.max(width, 60); + + // Header + const totalCost = this.workers.reduce((s, wk) => s + wk.cost, 0); + const aliveCount = this.workers.filter((wk) => wk.alive).length; + const now = new Date().toLocaleTimeString(); + + lines.push(t.bold(t.fg("accent", " GSD Parallel Monitor "))); + lines.push( + t.fg("muted", ` ${now} │ ${aliveCount}/${this.workers.length} alive │ Total: `) + + t.bold(`$${totalCost.toFixed(2)}`) + + t.fg("muted", " │ 5s refresh"), + ); + lines.push(t.fg("muted", "─".repeat(w))); + + if (this.workers.length === 0) { + lines.push(""); + lines.push(t.fg("warning", " No parallel workers found.")); + lines.push(t.fg("muted", " Run /gsd parallel start to begin.")); + } else { + for (const wk of this.workers) { + lines.push(""); + + // Health + ID + state + const healthColor = wk.alive ? "success" : "error"; + const glyph = healthGlyph(wk.alive, wk.heartbeatAge); + const stateText = wk.alive + ? t.fg("success", "RUNNING") + : t.fg("error", t.bold("DEAD")); + const heartbeatText = wk.heartbeatAge === Infinity + ? "never" + : formatDuration(wk.heartbeatAge) + " ago"; + + lines.push( + ` ${t.fg(healthColor, glyph)} ${t.bold(wk.mid)} ${stateText} ` + + t.fg("muted", `PID ${wk.pid} │ elapsed ${formatDuration(wk.elapsed)} │ `) + + `cost ${t.bold("$" + wk.cost.toFixed(2))} ` + + t.fg("muted", "│ heartbeat ") + t.fg(healthColor, heartbeatText), + ); + + // Current unit + if (wk.currentUnit) { + const phaseColor = + wk.unitType === "execute-task" ? "accent" + : wk.unitType === "research-slice" ? "warning" + : wk.unitType?.includes("complete") ? "success" + : "text"; + lines.push( + ` ${t.fg("muted", "▸")} ${t.fg(phaseColor, unitTypeLabel(wk.unitType))} ${wk.currentUnit} ` + + t.fg("muted", `(${formatDuration(wk.unitElapsed)})`), + ); + } else if (!wk.alive) { + lines.push(` ${t.fg("muted", "▸")} ${t.fg("error", "stopped")}`); + } else { + lines.push(` ${t.fg("muted", "▸ idle / between units")}`); + } + + // Slice progress chips + if (wk.slices.length > 0) { + const chips = wk.slices.map((s) => { + const pct = s.total > 0 ? s.done / s.total : 0; + const color = s.status === "complete" ? "success" : pct > 0 ? "warning" : "muted"; + return t.fg(color, `${s.id}:${s.done}/${s.total}`); + }); + lines.push(` ${t.fg("muted", "slices")} ${chips.join(" ")}`); + + // Task progress bar + const bar = progressBar(wk.doneTasks, wk.totalTasks, 25); + const pct = wk.totalTasks > 0 ? Math.round((wk.doneTasks / wk.totalTasks) * 100) : 0; + lines.push( + ` ${t.fg("muted", "tasks")} ${t.fg("success", bar)} ${wk.doneTasks}/${wk.totalTasks} ` + + t.fg("muted", `(${pct}%) │ slices done ${wk.doneSlices}/${wk.totalSlices}`), + ); + } + + // Errors + for (const err of wk.errors.slice(-2)) { + const truncated = err.length > w - 10 ? err.slice(0, w - 11) + "…" : err; + lines.push(` ${t.fg("error", "⚠ " + truncated)}`); + } + } + } + + // Event feed + lines.push(""); + lines.push(t.fg("muted", "─".repeat(w))); + lines.push(` ${t.bold("Recent Events")}`); + + if (this.events.length === 0) { + lines.push(t.fg("muted", " No events yet...")); + } else { + for (const evt of this.events.slice(-8)) { + const mid = evt.match(/^✓ (M\d+)\//)?.[1] || ""; + const truncated = evt.length > w - 10 ? evt.slice(0, w - 11) + "…" : evt; + lines.push(` ${t.fg("muted", "│")} ${t.fg("accent", mid)} ${truncated.replace(/^✓ M\d+\//, "")}`); + } + } + + // Footer + lines.push(""); + const allDone = this.workers.length > 0 && this.workers.every((wk) => !wk.alive); + if (allDone) { + lines.push(t.bold(t.fg("success", " ALL WORKERS COMPLETE"))); + for (const wk of this.workers) { + lines.push( + ` ${wk.mid} $${wk.cost.toFixed(2)} │ ${wk.doneSlices}/${wk.totalSlices} slices ` + + `${wk.doneTasks}/${wk.totalTasks} tasks │ ${formatDuration(wk.elapsed)}`, + ); + } + lines.push(` ${t.bold("Total: $" + this.workers.reduce((s, wk) => s + wk.cost, 0).toFixed(2))}`); + } + lines.push(t.fg("muted", " ESC/q to close │ ↑↓ scroll")); + + // Apply scroll — use terminal rows as height estimate + const termHeight = process.stdout.rows || 40; + const visible = lines.slice(this.scrollOffset, this.scrollOffset + termHeight); + this.cachedLines = visible; + return visible; + } +} diff --git a/src/resources/extensions/gsd/tests/parallel-monitor-overlay.test.ts b/src/resources/extensions/gsd/tests/parallel-monitor-overlay.test.ts new file mode 100644 index 000000000..38c657a76 --- /dev/null +++ b/src/resources/extensions/gsd/tests/parallel-monitor-overlay.test.ts @@ -0,0 +1,60 @@ +import { describe, it } from "node:test"; +import assert from "node:assert"; + +/** + * Basic tests for the parallel monitor overlay data helpers. + * The overlay is primarily a rendering component that reads existing + * status files — these tests verify the helper logic in isolation. + */ + +describe("parallel-monitor-overlay", () => { + it("progressBar generates correct width", async () => { + // Dynamic import to test the module loads cleanly + const mod = await import("../parallel-monitor-overlay.js"); + // Module should export the class + assert.ok(mod.ParallelMonitorOverlay, "ParallelMonitorOverlay class should be exported"); + }); + + it("ParallelMonitorOverlay can be instantiated with mock tui", async () => { + const mod = await import("../parallel-monitor-overlay.js"); + + let renderRequested = false; + const mockTui = { requestRender: () => { renderRequested = true; } }; + const mockTheme = { + fg: (_color: string, text: string) => text, + bold: (text: string) => text, + }; + let closed = false; + + const overlay = new mod.ParallelMonitorOverlay( + mockTui, + mockTheme as any, + () => { closed = true; }, + "/nonexistent/path", // basePath — no real data, tests empty state + ); + + // Should render without throwing + const lines = overlay.render(80); + assert.ok(Array.isArray(lines), "render should return an array"); + assert.ok(lines.length > 0, "render should return at least one line"); + + // Should contain header text + const joined = lines.join("\n"); + assert.ok(joined.includes("Parallel Monitor"), "should include title"); + assert.ok(joined.includes("No parallel workers found"), "should show empty state"); + + // Dispose should not throw + overlay.dispose(); + + // handleInput with ESC should call onClose + const overlay2 = new mod.ParallelMonitorOverlay( + mockTui, + mockTheme as any, + () => { closed = true; }, + "/nonexistent/path", + ); + overlay2.handleInput("q"); + assert.ok(closed, "pressing q should trigger onClose"); + overlay2.dispose(); + }); +}); From b6e105b0580a31d76e6bd31dc7b4ab815ba2a520 Mon Sep 17 00:00:00 2001 From: Iouri Goussev Date: Fri, 27 Mar 2026 16:51:49 -0400 Subject: [PATCH 23/27] perf(test): compile unit tests with esbuild, reclassify integration tests, fix node_modules symlink (#2809) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(test): wire src/resources/extensions/shared/tests/ into test:unit runner The test:unit glob excluded src/resources/extensions/shared/tests/ entirely, leaving format-utils.test.ts (and any future tests there) silently unfired. - Add shared/tests/*.test.ts to the test:unit glob in package.json - Export newestSrcMtime from ensure-workspace-builds.cjs (require.main guard prevents side-effects on require) so the staleness logic can be tested - Add src/tests/ensure-workspace-builds.test.ts covering newestSrcMtime: non-existent dir, no .ts files, single file, max of multiple, recursion, node_modules skip Closes #2808 * perf(test): compile unit tests with esbuild and fix dist-test/node_modules Replace per-file --experimental-strip-types with a single esbuild compilation step (scripts/compile-tests.mjs) that compiles all src/ TypeScript to dist-test/ in ~3s, then runs the pre-compiled JS. Eliminates ~1.7s Node startup overhead per test file. - scripts/compile-tests.mjs: esbuild compilation, asset copy, .ts→.js rewrite, stale file cleanup; creates dist-test/node_modules symlink so resource-loader.ts resolves gsdNodeModules to a real path (fixes node-modules-symlink test failure) - scripts/dist-test-resolve.mjs: ESM loader hook for @gsd/* bare specifiers and .ts→.js fallback rewriting at runtime - .gitignore: exclude dist-test/ from version control - package.json: add test:compile script; update test:unit to compile-then-run; update test:integration globs to cover new integration/ subdirectories - worker-registry.ts: unref() cleanup timer so it does not keep the Node process alive after tests complete Closes #2858 * fix(test): update relative imports in tests/integration/ after directory move When tests were moved from tests/ to tests/integration/ in the previous commit, relative imports weren't updated. ../foo now resolves one level too shallow. Fix all 117 import paths across 43 test files: - ../foo → ../../foo (source files at gsd/ level) - ../../get-secrets-from-user.ts → ../../../ (at extensions/ level) - ../../subagent/worker-registry.ts → ../../../ (at extensions/ level) - ./marketplace-test-fixtures.js → ../marketplace-test-fixtures.ts - ./test-helpers.ts → ../test-helpers.ts typecheck:extensions now passes with zero errors. Co-Authored-By: Claude Sonnet 4.6 * test(integration): set 10-minute timeout for integration test runner build job takes ~7min on main. Without a global timeout, hanging tests block the suite indefinitely. --test-timeout=600000 caps each test at 10min. Co-Authored-By: Claude Sonnet 4.6 * Revert "test(integration): set 10-minute timeout for integration test runner" This reverts commit be77ead77d369ad8569292ae6b69ba56435f5433. * fix(test): correct formatDuration(0) edge case and docker test root path - formatDuration(0) now returns '0s' instead of '0ms' by guarding the sub-second branch with ms > 0 - docker-template.test.ts root path goes ../../.. from dist-test/src/tests/ to reach project root instead of landing in dist-test/ - replace require() calls in skill-health.ts and visualizer-overlay.ts with proper ES module imports Co-Authored-By: Claude Sonnet 4.6 * fix(test): correct relative import paths in integration tests All affected tests were one directory level off — importing from ../web/ and ../resources/ when the correct paths are ../../web/ and ../../resources/. Tests live at src/tests/integration/, not src/tests/. Co-Authored-By: Claude Sonnet 4.6 * fix(test): add esbuild to root devDeps and wire dist-test-resolve hook P1: esbuild was only in web/package.json — compile-tests.mjs requires it at the root node_modules path, so CI failed on clean installs. P2: dist-test-resolve.mjs existed but was never loaded; @gsd/* imports in compiled tests resolved to installed workspace packages instead of freshly compiled dist-test output. Add --import to test:unit. Co-Authored-By: Claude Sonnet 4.6 * fix(deps): align esbuild version with lock file (0.25.12) ^0.27.4 didn't satisfy the existing lock file entry. Use the version already present so npm ci passes without regenerating the lock file. Co-Authored-By: Claude Sonnet 4.6 * fix(test): correct all relative import depths in src/tests/integration/ Tests in src/tests/integration/ need 3 levels up (../../..) to reach project-root dirs (web/, packages/) and 2 levels up (../..) to reach src-level dirs (src/web/, src/cli-web-branch.ts). Fixes: - ../../web/lib/ → ../../../web/lib/ (Next.js app, not src/web/) - ../../web/app/ → ../../../web/app/ - ../../packages/ → ../../../packages/ - ../cli-web-branch.ts → ../../cli-web-branch.ts - ../web-mode.ts → ../../web-mode.ts - ../resources/extensions/ → ../../resources/extensions/ - ci_monitor ROOT path: 2 levels up → 3 levels up - web-responsive WEB_ROOT: 2 levels up → 3 levels up Co-Authored-By: Claude Sonnet 4.6 * chore(test): use dot reporter for test:unit to reduce noise Co-Authored-By: Claude Sonnet 4.6 * chore(test): switch test:unit reporter to tap Co-Authored-By: Claude Sonnet 4.6 * chore(test): compact test reporter — silent on pass, failures + summary only Co-Authored-By: Claude Sonnet 4.6 * chore(test): include shared/tests in test:coverage Co-Authored-By: Claude Sonnet 4.6 * fix(test): correct path depths in tests moved to integration/ Tests moved from tests/ to tests/integration/ need one extra ../ to reach the same source files. Also fix web component paths — those files live at web/ not src/web/. Co-Authored-By: Claude Sonnet 4.6 * fix(test): fix web component paths in web-session-parity-contract Co-Authored-By: Claude Sonnet 4.6 * fix(test): use process.cwd() for project root in docker-template test Resolving relative to __dirname breaks under test:coverage which runs source files directly from src/tests/ — needs ../.. not ../../.. (the extra level only exists in the compiled dist-test/ output). Co-Authored-By: Claude Sonnet 4.6 * ci: retrigger CI --------- Co-authored-by: Claude Sonnet 4.6 --- .gitignore | 3 + package.json | 8 +- scripts/compile-tests.mjs | 214 ++++++++++++++++++ scripts/dist-test-resolve.mjs | 46 ++++ scripts/ensure-workspace-builds.cjs | 90 ++++---- scripts/test-reporter-compact.mjs | 44 ++++ src/resources/extensions/gsd/skill-health.ts | 4 +- .../all-milestones-complete-merge.test.ts | 6 +- .../atomic-task-closeout.test.ts | 2 +- .../{ => integration}/auto-preflight.test.ts | 2 +- .../{ => integration}/auto-recovery.test.ts | 14 +- .../auto-secrets-gate.test.ts | 4 +- .../auto-stash-merge.test.ts | 6 +- .../auto-worktree-milestone-merge.test.ts | 8 +- .../{ => integration}/auto-worktree.test.ts | 10 +- .../{ => integration}/continue-here.test.ts | 6 +- .../doctor-completion-deferral.test.ts | 2 +- .../doctor-delimiter-fix.test.ts | 2 +- .../doctor-enhancements.test.ts | 6 +- .../doctor-environment-worktree.test.ts | 2 +- .../doctor-environment.test.ts | 2 +- .../{ => integration}/doctor-fixlevel.test.ts | 4 +- .../{ => integration}/doctor-git.test.ts | 2 +- .../doctor-proactive.test.ts | 2 +- .../doctor-roadmap-summary-atomicity.test.ts | 2 +- .../{ => integration}/doctor-runtime.test.ts | 2 +- .../tests/{ => integration}/doctor.test.ts | 2 +- .../e2e-workflow-pipeline-integration.test.ts | 10 +- ...ature-branch-lifecycle-integration.test.ts | 8 +- .../{ => integration}/git-locale.test.ts | 8 +- .../{ => integration}/git-self-heal.test.ts | 2 +- .../{ => integration}/git-service.test.ts | 8 +- .../gitignore-tracked-gsd.test.ts | 4 +- .../{ => integration}/idle-recovery.test.ts | 6 +- .../inherited-repo-home-dir.test.ts | 2 +- .../integration-lifecycle.test.ts | 8 +- .../integration-mixed-milestones.test.ts | 12 +- .../integration-proof.test.ts | 24 +- .../{ => integration}/migrate-command.test.ts | 4 +- .../milestone-transition-worktree.test.ts | 6 +- .../{ => integration}/parallel-merge.test.ts | 6 +- ...rallel-workers-multi-milestone-e2e.test.ts | 6 +- .../gsd/tests/{ => integration}/paths.test.ts | 2 +- .../plugin-importer-live.test.ts | 4 +- .../queue-completed-milestone-perf.test.ts | 6 +- .../queue-reorder-e2e.test.ts | 10 +- .../quick-branch-lifecycle.test.ts | 10 +- .../tests/{ => integration}/run-uat.test.ts | 8 +- .../{ => integration}/token-savings.test.ts | 6 +- .../{ => integration}/worktree-e2e.test.ts | 8 +- .../extensions/gsd/visualizer-overlay.ts | 6 +- .../extensions/shared/format-utils.ts | 2 +- .../extensions/subagent/worker-registry.ts | 3 +- src/tests/docker-template.test.ts | 6 +- src/tests/ensure-workspace-builds.test.ts | 64 ++++++ .../{ => integration}/ci_monitor.test.ts | 2 +- .../{ => integration}/web-auth-token.test.ts | 0 .../{ => integration}/web-boot-node24.test.ts | 2 +- .../web-bridge-contract.test.ts | 10 +- .../web-bridge-package-root.test.ts | 2 +- .../web-bridge-terminal-contract.test.ts | 8 +- .../{ => integration}/web-cli-entry.test.ts | 2 +- .../web-command-parity-contract.test.ts | 10 +- .../web-continuity-contract.test.ts | 0 .../web-dashboard-rtk-contract.test.ts | 0 .../web-diagnostics-contract.test.ts | 8 +- .../web-live-interaction-contract.test.ts | 8 +- .../web-live-state-contract.test.ts | 12 +- .../{ => integration}/web-mode-cli.test.ts | 4 +- .../web-mode-network-flags.test.ts | 4 +- .../web-multi-project-contract.test.ts | 2 +- .../web-onboarding-contract.test.ts | 10 +- .../web-onboarding-presentation.test.ts | 2 +- .../web-project-discovery-contract.test.ts | 5 +- .../{ => integration}/web-project-url.test.ts | 2 +- .../web-recovery-diagnostics-contract.test.ts | 4 +- .../{ => integration}/web-responsive.test.ts | 2 +- .../web-session-parity-contract.test.ts | 22 +- .../web-state-surfaces-contract.test.ts | 62 ++--- .../web-subprocess-module-resolution.test.ts | 2 +- .../web-switch-project.test.ts | 0 .../web-terminal-allowlist.test.ts | 4 +- .../web-workflow-action-execution.test.ts | 2 +- .../web-workflow-controls-contract.test.ts | 2 +- 84 files changed, 657 insertions(+), 280 deletions(-) create mode 100644 scripts/compile-tests.mjs create mode 100644 scripts/dist-test-resolve.mjs create mode 100644 scripts/test-reporter-compact.mjs rename src/resources/extensions/gsd/tests/{ => integration}/all-milestones-complete-merge.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/atomic-task-closeout.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-preflight.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-recovery.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-secrets-gate.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-stash-merge.test.ts (95%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-worktree-milestone-merge.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/auto-worktree.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/continue-here.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-completion-deferral.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-delimiter-fix.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-enhancements.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-environment-worktree.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-environment.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-fixlevel.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-git.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-proactive.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-roadmap-summary-atomicity.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor-runtime.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/doctor.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/e2e-workflow-pipeline-integration.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/feature-branch-lifecycle-integration.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/git-locale.test.ts (93%) rename src/resources/extensions/gsd/tests/{ => integration}/git-self-heal.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/git-service.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/gitignore-tracked-gsd.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/idle-recovery.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/inherited-repo-home-dir.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/integration-lifecycle.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/integration-mixed-milestones.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/integration-proof.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/migrate-command.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/milestone-transition-worktree.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/parallel-merge.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/parallel-workers-multi-milestone-e2e.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/paths.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/plugin-importer-live.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/queue-completed-milestone-perf.test.ts (96%) rename src/resources/extensions/gsd/tests/{ => integration}/queue-reorder-e2e.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/quick-branch-lifecycle.test.ts (97%) rename src/resources/extensions/gsd/tests/{ => integration}/run-uat.test.ts (98%) rename src/resources/extensions/gsd/tests/{ => integration}/token-savings.test.ts (99%) rename src/resources/extensions/gsd/tests/{ => integration}/worktree-e2e.test.ts (97%) create mode 100644 src/tests/ensure-workspace-builds.test.ts rename src/tests/{ => integration}/ci_monitor.test.ts (98%) rename src/tests/{ => integration}/web-auth-token.test.ts (100%) rename src/tests/{ => integration}/web-boot-node24.test.ts (98%) rename src/tests/{ => integration}/web-bridge-contract.test.ts (98%) rename src/tests/{ => integration}/web-bridge-package-root.test.ts (97%) rename src/tests/{ => integration}/web-bridge-terminal-contract.test.ts (97%) rename src/tests/{ => integration}/web-cli-entry.test.ts (97%) rename src/tests/{ => integration}/web-command-parity-contract.test.ts (98%) rename src/tests/{ => integration}/web-continuity-contract.test.ts (100%) rename src/tests/{ => integration}/web-dashboard-rtk-contract.test.ts (100%) rename src/tests/{ => integration}/web-diagnostics-contract.test.ts (98%) rename src/tests/{ => integration}/web-live-interaction-contract.test.ts (99%) rename src/tests/{ => integration}/web-live-state-contract.test.ts (97%) rename src/tests/{ => integration}/web-mode-cli.test.ts (99%) rename src/tests/{ => integration}/web-mode-network-flags.test.ts (98%) rename src/tests/{ => integration}/web-multi-project-contract.test.ts (99%) rename src/tests/{ => integration}/web-onboarding-contract.test.ts (98%) rename src/tests/{ => integration}/web-onboarding-presentation.test.ts (97%) rename src/tests/{ => integration}/web-project-discovery-contract.test.ts (98%) rename src/tests/{ => integration}/web-project-url.test.ts (97%) rename src/tests/{ => integration}/web-recovery-diagnostics-contract.test.ts (98%) rename src/tests/{ => integration}/web-responsive.test.ts (99%) rename src/tests/{ => integration}/web-session-parity-contract.test.ts (97%) rename src/tests/{ => integration}/web-state-surfaces-contract.test.ts (90%) rename src/tests/{ => integration}/web-subprocess-module-resolution.test.ts (99%) rename src/tests/{ => integration}/web-switch-project.test.ts (100%) rename src/tests/{ => integration}/web-terminal-allowlist.test.ts (84%) rename src/tests/{ => integration}/web-workflow-action-execution.test.ts (97%) rename src/tests/{ => integration}/web-workflow-controls-contract.test.ts (98%) diff --git a/.gitignore b/.gitignore index 465c44380..e38b0e9bb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ +# ── Compiled test output ── +dist-test/ + # ── GSD project state (development-only, lives in worktree branches) ── package-lock.json .claude/ diff --git a/package.json b/package.json index 0c925eb9b..4b2bcf3d4 100644 --- a/package.json +++ b/package.json @@ -53,11 +53,12 @@ "copy-resources": "node scripts/copy-resources.cjs", "copy-themes": "node scripts/copy-themes.cjs", "copy-export-html": "node scripts/copy-export-html.cjs", - "test:unit": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts", + "test:compile": "node scripts/compile-tests.mjs", + "test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test 'dist-test/src/tests/*.test.js' 'dist-test/src/resources/extensions/gsd/tests/*.test.js' 'dist-test/src/resources/extensions/gsd/tests/*.test.mjs' 'dist-test/src/resources/extensions/shared/tests/*.test.js' 'dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js' 'dist-test/src/resources/extensions/github-sync/tests/*.test.js' 'dist-test/src/resources/extensions/universal-config/tests/*.test.js' 'dist-test/src/resources/extensions/voice/tests/*.test.js'", "test:packages": "node --test packages/pi-coding-agent/dist/core/*.test.js", "test:marketplace": "GSD_TEST_CLONE_MARKETPLACES=1 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/gsd/tests/claude-import-tui.test.ts src/resources/extensions/gsd/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts", - "test:coverage": "c8 --reporter=text --reporter=lcov --exclude='src/resources/extensions/gsd/tests/**' --exclude='src/tests/**' --exclude='scripts/**' --exclude='native/**' --exclude='node_modules/**' --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts", - "test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*integration*.test.ts src/tests/integration/*.test.ts", + "test:coverage": "c8 --reporter=text --reporter=lcov --exclude='src/resources/extensions/gsd/tests/**' --exclude='src/tests/**' --exclude='scripts/**' --exclude='native/**' --exclude='node_modules/**' --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts", + "test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test 'src/tests/integration/*.test.ts' 'src/resources/extensions/gsd/tests/integration/*.test.ts' 'src/resources/extensions/async-jobs/*.test.ts' 'src/resources/extensions/browser-tools/tests/*.test.mjs'", "pretest": "npm run typecheck:extensions", "test": "npm run test:unit && npm run test:integration", "test:smoke": "node --experimental-strip-types tests/smoke/run.ts", @@ -136,6 +137,7 @@ "@types/node": "^24.12.0", "@types/picomatch": "^4.0.2", "c8": "^11.0.0", + "esbuild": "^0.25.12", "jiti": "^2.6.1", "typescript": "^5.4.0" }, diff --git a/scripts/compile-tests.mjs b/scripts/compile-tests.mjs new file mode 100644 index 000000000..066c02e9b --- /dev/null +++ b/scripts/compile-tests.mjs @@ -0,0 +1,214 @@ +#!/usr/bin/env node +/** + * Compile all TypeScript source + test files to dist-test/ using esbuild. + * Run compiled JS directly with node --test (no per-file TS overhead). + * + * Usage: node scripts/compile-tests.mjs + */ + +import { cp, mkdir, readdir, readFile, writeFile } from 'node:fs/promises'; +import { existsSync, symlinkSync } from 'node:fs'; +import { createRequire } from 'node:module'; +import { join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +const ROOT = join(__dirname, '..'); + +const require = createRequire(import.meta.url); +const esbuild = require(join(ROOT, 'node_modules/esbuild')); + +// Recursively collect files by extension (skip node_modules, templates, etc.) +// Directories to skip during file collection +const SKIP_DIRS = new Set(['node_modules', 'templates', '__tests__', 'integration']); + +async function collectFiles(dir, exts = ['.ts', '.mjs']) { + const results = []; + let entries; + try { + entries = await readdir(dir, { withFileTypes: true }); + } catch { + return results; + } + for (const entry of entries) { + if (SKIP_DIRS.has(entry.name)) continue; + const full = join(dir, entry.name); + if (entry.isDirectory()) { + results.push(...await collectFiles(full, exts)); + } else if ( + exts.some(ext => entry.name.endsWith(ext)) && + !entry.name.endsWith('.d.ts') + ) { + results.push(full); + } + } + return results; +} + +// Dirs to skip when copying assets (node_modules are never useful in dist-test) +const ASSET_SKIP_DIRS = new Set(['node_modules', '__tests__', 'integration']); + +/** + * Recursively copy files from srcDir to destDir. + * Skips node_modules only. Copies everything: .ts/.tsx originals (for jiti), + * .mjs helpers, .md/.yaml/.json assets, etc. + * esbuild compiled .js output already lands in dist-test, so we just + * overlay the asset files on top. + */ +async function copyAssets(srcDir, destDir) { + let entries; + try { + entries = await readdir(srcDir, { withFileTypes: true }); + } catch { + return; // directory doesn't exist, nothing to copy + } + for (const entry of entries) { + if (ASSET_SKIP_DIRS.has(entry.name)) continue; + const srcPath = join(srcDir, entry.name); + const destPath = join(destDir, entry.name); + if (entry.isDirectory()) { + await copyAssets(srcPath, destPath); + } else { + await mkdir(destDir, { recursive: true }); + await cp(srcPath, destPath, { force: true }); + } + } +} + +async function main() { + const start = Date.now(); + + // Collect entry points from src/ and packages/*/src/ + const srcFiles = await collectFiles(join(ROOT, 'src')); + + const packagesDir = join(ROOT, 'packages'); + const pkgEntries = await readdir(packagesDir, { withFileTypes: true }); + const packageFiles = []; + for (const entry of pkgEntries) { + if (!entry.isDirectory()) continue; + const pkgSrc = join(packagesDir, entry.name, 'src'); + packageFiles.push(...await collectFiles(pkgSrc)); + } + + // Also compile web/lib/ — some tests import from ../../web/lib/ + const webLibFiles = await collectFiles(join(ROOT, 'web', 'lib')); + + const entryPoints = [...srcFiles, ...packageFiles, ...webLibFiles]; + console.log(`Compiling ${entryPoints.length} files to dist-test/...`); + + // bundle:false transforms TypeScript but keeps import specifiers verbatim. + // We post-process the output to rewrite .ts → .js in import strings. + await esbuild.build({ + entryPoints, + outdir: join(ROOT, 'dist-test'), + outbase: ROOT, + bundle: false, + format: 'esm', + platform: 'node', + target: 'node22', + sourcemap: 'inline', + packages: 'external', + logLevel: 'warning', + }); + + // Copy non-compiled assets from src/ to dist-test/src/ maintaining structure. + // Tests use import.meta.url to resolve sibling .md, .yaml, .json, .ts etc. + // Also copy original .ts files — jiti-based imports load .ts source directly. + const srcDir = join(ROOT, 'src'); + const distSrcDir = join(ROOT, 'dist-test', 'src'); + await copyAssets(srcDir, distSrcDir); + console.log('Copied non-TS assets and .ts source files to dist-test/src/'); + + // Copy packages/*/src/ assets as well + for (const entry of pkgEntries) { + if (!entry.isDirectory()) continue; + const pkgSrc = join(packagesDir, entry.name, 'src'); + const pkgDistSrc = join(ROOT, 'dist-test', 'packages', entry.name, 'src'); + await copyAssets(pkgSrc, pkgDistSrc); + } + + // Copy web/lib/ assets (tests import from ../../web/lib/ relative to dist-test/src/tests/) + await copyAssets(join(ROOT, 'web', 'lib'), join(ROOT, 'dist-test', 'web', 'lib')); + + // Copy scripts/ non-TS files (.cjs etc) — some tests require() scripts directly + await copyAssets(join(ROOT, 'scripts'), join(ROOT, 'dist-test', 'scripts')); + + // Copy root package.json — some tests read it to check version/engines fields + await cp(join(ROOT, 'package.json'), join(ROOT, 'dist-test', 'package.json'), { force: true }); + + // Copy root dist/ into dist-test/dist/ — some tests compute projectRoot as + // 3 levels up from dist-test/src/tests/ which lands at dist-test/, then + // import from dist/mcp-server.js etc. + const rootDistDir = join(ROOT, 'dist'); + const distTestDistDir = join(ROOT, 'dist-test', 'dist'); + await copyAssets(rootDistDir, distTestDistDir); + + // Post-process: rewrite .ts import specifiers to .js in all compiled JS files. + // esbuild with bundle:false preserves original specifiers; Node can't load .ts. + const compiledJsFiles = await collectFiles(join(ROOT, 'dist-test'), ['.js']); + // Regex matches .ts in from/import() strings but not sourceMappingURL comments + const tsImportRe = /(from\s+["'])(\.\.?\/[^"']*?)\.ts(["'])/g; + const tsDynImportRe = /(import\(["'])(\.\.?\/[^"']*?)\.ts(["'])\)/g; + + let rewritten = 0; + await Promise.all(compiledJsFiles.map(async (file) => { + const src = await readFile(file, 'utf-8'); + const out = src + .replace(tsImportRe, (_, a, b, c) => `${a}${b}.js${c}`) + .replace(tsDynImportRe, (_, a, b, c) => `${a}${b}.js${c})`); + if (out !== src) { + await writeFile(file, out, 'utf-8'); + rewritten++; + } + })); + if (rewritten > 0) { + console.log(`Rewrote .ts → .js imports in ${rewritten} files`); + } + + // Remove stale compiled test files: dist-test entries whose source no longer exists + // in a non-integration source directory (e.g. test moved to integration/). + // Only cleans *.test.js and *.test.ts files to avoid touching non-test outputs. + const { rm } = await import('node:fs/promises'); + const { existsSync } = await import('node:fs'); + const testDirsToClean = [ + [join(ROOT, 'dist-test', 'src', 'tests'), join(ROOT, 'src', 'tests')], + [join(ROOT, 'dist-test', 'src', 'resources', 'extensions', 'gsd', 'tests'), + join(ROOT, 'src', 'resources', 'extensions', 'gsd', 'tests')], + ]; + let staleCleaned = 0; + for (const [distDir, srcDir] of testDirsToClean) { + let distEntries; + try { distEntries = await readdir(distDir, { withFileTypes: true }); } catch { continue; } + for (const entry of distEntries) { + if (!entry.isFile()) continue; + if (!entry.name.match(/\.test\.(js|ts)$/)) continue; + const stem = entry.name.replace(/\.(js|ts)$/, ''); + // Source could be .ts or .mjs (esbuild compiles both to .js) + const hasTsSrc = existsSync(join(srcDir, stem + '.ts')); + const hasMjsSrc = existsSync(join(srcDir, stem + '.mjs')); + if (!hasTsSrc && !hasMjsSrc) { + await rm(join(distDir, entry.name)); + staleCleaned++; + } + } + } + if (staleCleaned > 0) { + console.log(`Removed ${staleCleaned} stale compiled test files from dist-test/`); + } + + // Ensure dist-test/node_modules exists so resource-loader.ts (which computes + // packageRoot from import.meta.url) resolves gsdNodeModules to a real path. + // Without this, initResources creates dangling symlinks in test environments. + const distNodeModules = join(ROOT, 'dist-test', 'node_modules'); + if (!existsSync(distNodeModules)) { + symlinkSync(join(ROOT, 'node_modules'), distNodeModules); + } + + const elapsed = ((Date.now() - start) / 1000).toFixed(2); + console.log(`Done in ${elapsed}s`); +} + +main().catch(err => { + console.error(err); + process.exit(1); +}); diff --git a/scripts/dist-test-resolve.mjs b/scripts/dist-test-resolve.mjs new file mode 100644 index 000000000..a5d94da11 --- /dev/null +++ b/scripts/dist-test-resolve.mjs @@ -0,0 +1,46 @@ +/** + * Minimal Node.js import hook for running tests from dist-test/. + * + * esbuild with bundle:false preserves import specifiers verbatim, so compiled + * .js files still import '../foo.ts'. This hook redirects those to '.js' so + * Node can find the compiled output. + * + * Also redirects @gsd bare imports to their compiled counterparts in dist-test. + */ + +import { fileURLToPath, pathToFileURL } from 'node:url'; +import { existsSync } from 'node:fs'; +import { join } from 'node:path'; + +// dist-test root — everything compiled lands here +const DIST_TEST = new URL('../dist-test/', import.meta.url).href; + +// Absolute paths to compiled @gsd/* entry points +const GSD_ALIASES = { + '@gsd/pi-coding-agent': new URL('../dist-test/packages/pi-coding-agent/src/index.js', import.meta.url).href, + '@gsd/pi-ai/oauth': new URL('../dist-test/packages/pi-ai/src/utils/oauth/index.js', import.meta.url).href, + '@gsd/pi-ai': new URL('../dist-test/packages/pi-ai/src/index.js', import.meta.url).href, + '@gsd/pi-agent-core': new URL('../dist-test/packages/pi-agent-core/src/index.js', import.meta.url).href, + '@gsd/pi-tui': new URL('../dist-test/packages/pi-tui/src/index.js', import.meta.url).href, + '@gsd/native': new URL('../dist-test/packages/native/src/index.js', import.meta.url).href, +}; + +export function resolve(specifier, context, nextResolve) { + // 1. @gsd/* bare imports → compiled dist-test counterpart + if (specifier in GSD_ALIASES) { + return nextResolve(GSD_ALIASES[specifier], context); + } + + // 2. .ts relative imports inside dist-test → .js + if ( + specifier.endsWith('.ts') && + (specifier.startsWith('./') || specifier.startsWith('../')) && + context.parentURL && + context.parentURL.startsWith(DIST_TEST) + ) { + const jsSpecifier = specifier.slice(0, -3) + '.js'; + return nextResolve(jsSpecifier, context); + } + + return nextResolve(specifier, context); +} diff --git a/scripts/ensure-workspace-builds.cjs b/scripts/ensure-workspace-builds.cjs index 840a818d4..44f7ea2c4 100644 --- a/scripts/ensure-workspace-builds.cjs +++ b/scripts/ensure-workspace-builds.cjs @@ -18,25 +18,6 @@ const { existsSync, statSync, readdirSync } = require('fs') const { resolve, join } = require('path') const { execSync } = require('child_process') -const root = resolve(__dirname, '..') -const packagesDir = join(root, 'packages') - -// Skip if packages/ doesn't exist (published tarball / end-user install) -if (!existsSync(packagesDir)) process.exit(0) - -// Skip in CI — the pipeline runs `npm run build` explicitly -if (process.env.CI === 'true' || process.env.CI === '1') process.exit(0) - -// Workspace packages that need dist/index.js at runtime. -// Order matters: dependencies must build before dependents. -const WORKSPACE_PACKAGES = [ - 'native', - 'pi-tui', - 'pi-ai', - 'pi-agent-core', - 'pi-coding-agent', -] - /** * Returns the most recent mtime (ms) of any .ts file under dir, recursively. * Returns 0 if no .ts files found. @@ -56,31 +37,54 @@ function newestSrcMtime(dir) { return newest } -const stale = [] -for (const pkg of WORKSPACE_PACKAGES) { - const distIndex = join(packagesDir, pkg, 'dist', 'index.js') - if (!existsSync(distIndex)) { - stale.push(pkg) - continue +if (require.main === module) { + const root = resolve(__dirname, '..') + const packagesDir = join(root, 'packages') + + // Skip if packages/ doesn't exist (published tarball / end-user install) + if (!existsSync(packagesDir)) process.exit(0) + + // Skip in CI — the pipeline runs `npm run build` explicitly + if (process.env.CI === 'true' || process.env.CI === '1') process.exit(0) + + // Workspace packages that need dist/index.js at runtime. + // Order matters: dependencies must build before dependents. + const WORKSPACE_PACKAGES = [ + 'native', + 'pi-tui', + 'pi-ai', + 'pi-agent-core', + 'pi-coding-agent', + ] + + const stale = [] + for (const pkg of WORKSPACE_PACKAGES) { + const distIndex = join(packagesDir, pkg, 'dist', 'index.js') + if (!existsSync(distIndex)) { + stale.push(pkg) + continue + } + const distMtime = statSync(distIndex).mtimeMs + const srcMtime = newestSrcMtime(join(packagesDir, pkg, 'src')) + if (srcMtime > distMtime) { + stale.push(pkg) + } } - const distMtime = statSync(distIndex).mtimeMs - const srcMtime = newestSrcMtime(join(packagesDir, pkg, 'src')) - if (srcMtime > distMtime) { - stale.push(pkg) + + if (stale.length === 0) process.exit(0) + + process.stderr.write(` Building ${stale.length} workspace package(s) with stale or missing dist/: ${stale.join(', ')}\n`) + + for (const pkg of stale) { + const pkgDir = join(packagesDir, pkg) + try { + execSync('npm run build', { cwd: pkgDir, stdio: 'pipe' }) + process.stderr.write(` ✓ ${pkg}\n`) + } catch (err) { + process.stderr.write(` ✗ ${pkg} build failed: ${err.message}\n`) + // Non-fatal — the user can run `npm run build` manually + } } } -if (stale.length === 0) process.exit(0) - -process.stderr.write(` Building ${stale.length} workspace package(s) with stale or missing dist/: ${stale.join(', ')}\n`) - -for (const pkg of stale) { - const pkgDir = join(packagesDir, pkg) - try { - execSync('npm run build', { cwd: pkgDir, stdio: 'pipe' }) - process.stderr.write(` ✓ ${pkg}\n`) - } catch (err) { - process.stderr.write(` ✗ ${pkg} build failed: ${err.message}\n`) - // Non-fatal — the user can run `npm run build` manually - } -} +module.exports = { newestSrcMtime } diff --git a/scripts/test-reporter-compact.mjs b/scripts/test-reporter-compact.mjs new file mode 100644 index 000000000..ec87b221d --- /dev/null +++ b/scripts/test-reporter-compact.mjs @@ -0,0 +1,44 @@ +/** + * Compact test reporter: silent on pass, prints failures + final summary. + * Usage: --test-reporter=./scripts/test-reporter-compact.mjs + */ +import { Transform } from 'node:stream'; + +export default class CompactReporter extends Transform { + #pass = 0; + #fail = 0; + #skip = 0; + #failures = []; + + constructor() { + super({ objectMode: true }); + } + + _transform(event, _enc, cb) { + switch (event.type) { + case 'test:pass': + if (!event.data.skip) this.#pass++; + else this.#skip++; + break; + case 'test:fail': { + this.#fail++; + const { name, details } = event.data; + const err = details?.error; + const msg = err?.message ?? String(err ?? 'unknown'); + const loc = err?.cause?.stack?.split('\n')[1]?.trim() ?? ''; + this.#failures.push(` ✖ ${name}\n ${msg}${loc ? `\n ${loc}` : ''}`); + break; + } + } + cb(); + } + + _flush(cb) { + if (this.#failures.length) { + this.push(`\n✖ failing tests:\n${this.#failures.join('\n\n')}\n`); + } + const status = this.#fail === 0 ? '✔' : '✖'; + this.push(`\n${status} ${this.#pass} passed, ${this.#fail} failed, ${this.#skip} skipped\n`); + cb(); + } +} diff --git a/src/resources/extensions/gsd/skill-health.ts b/src/resources/extensions/gsd/skill-health.ts index a59f4d8aa..75217a5b6 100644 --- a/src/resources/extensions/gsd/skill-health.ts +++ b/src/resources/extensions/gsd/skill-health.ts @@ -13,7 +13,7 @@ * research identified as critical for skill quality. */ -import { existsSync, readFileSync, readdirSync } from "node:fs"; +import { existsSync, readFileSync, readdirSync, statSync } from "node:fs"; import { join } from "node:path"; import { homedir } from "node:os"; import type { UnitMetrics, MetricsLedger } from "./metrics.js"; @@ -210,7 +210,7 @@ export function formatSkillDetail(basePath: string, skillName: string): string { // Check for SKILL.md existence const skillPath = join(homedir(), ".agents", "skills", skillName, "SKILL.md"); if (existsSync(skillPath)) { - const stat = require("node:fs").statSync(skillPath); + const stat = statSync(skillPath); lines.push(""); lines.push(`SKILL.md: ${skillPath}`); lines.push(`Last modified: ${stat.mtime.toISOString().slice(0, 10)}`); diff --git a/src/resources/extensions/gsd/tests/all-milestones-complete-merge.test.ts b/src/resources/extensions/gsd/tests/integration/all-milestones-complete-merge.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/all-milestones-complete-merge.test.ts rename to src/resources/extensions/gsd/tests/integration/all-milestones-complete-merge.test.ts index 0b06d721b..d3a0c7c2e 100644 --- a/src/resources/extensions/gsd/tests/all-milestones-complete-merge.test.ts +++ b/src/resources/extensions/gsd/tests/integration/all-milestones-complete-merge.test.ts @@ -31,7 +31,7 @@ import { isInAutoWorktree, getAutoWorktreeOriginalBase, mergeMilestoneToMain, -} from "../auto-worktree.ts"; +} from "../../auto-worktree.ts"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -78,9 +78,9 @@ function createMilestoneArtifacts(dir: string, mid: string): void { // ─── Source-level: verify the merge code exists in the "all complete" path ──── test("auto-loop 'all milestones complete' path merges before stopping (#962)", () => { - const loopSrc = readFileSync(join(__dirname, "..", "auto", "phases.ts"), "utf-8"); + const loopSrc = readFileSync(join(__dirname, "../..", "auto", "phases.ts"), "utf-8"); const resolverSrc = readFileSync( - join(__dirname, "..", "worktree-resolver.ts"), + join(__dirname, "../..", "worktree-resolver.ts"), "utf-8", ); diff --git a/src/resources/extensions/gsd/tests/atomic-task-closeout.test.ts b/src/resources/extensions/gsd/tests/integration/atomic-task-closeout.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/atomic-task-closeout.test.ts rename to src/resources/extensions/gsd/tests/integration/atomic-task-closeout.test.ts index 3e1c58753..e6c4143d8 100644 --- a/src/resources/extensions/gsd/tests/atomic-task-closeout.test.ts +++ b/src/resources/extensions/gsd/tests/integration/atomic-task-closeout.test.ts @@ -9,7 +9,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import test from "node:test"; import assert from "node:assert/strict"; -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; function makeTmp(name: string): string { const dir = join(tmpdir(), `atomic-closeout-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`); diff --git a/src/resources/extensions/gsd/tests/auto-preflight.test.ts b/src/resources/extensions/gsd/tests/integration/auto-preflight.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/auto-preflight.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-preflight.test.ts index 63eb7e60a..1a332c6eb 100644 --- a/src/resources/extensions/gsd/tests/auto-preflight.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-preflight.test.ts @@ -4,7 +4,7 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; -import { runGSDDoctor, selectDoctorScope, filterDoctorIssues } from "../doctor.js"; +import { runGSDDoctor, selectDoctorScope, filterDoctorIssues } from "../../doctor.js"; test("auto-preflight scopes to active milestone, ignoring historical", async (t) => { const tmpBase = mkdtempSync(join(tmpdir(), "gsd-auto-preflight-test-")); diff --git a/src/resources/extensions/gsd/tests/auto-recovery.test.ts b/src/resources/extensions/gsd/tests/integration/auto-recovery.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/auto-recovery.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-recovery.test.ts index a71882f3a..8aef15b20 100644 --- a/src/resources/extensions/gsd/tests/auto-recovery.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-recovery.test.ts @@ -11,19 +11,19 @@ import { diagnoseExpectedArtifact, buildLoopRemediationSteps, hasImplementationArtifacts, -} from "../auto-recovery.ts"; -import { parseRoadmap, parsePlan } from "../parsers-legacy.ts"; -import { parseTaskPlanFile, clearParseCache } from "../files.ts"; -import { invalidateAllCaches } from "../cache.ts"; -import { deriveState, invalidateStateCache } from "../state.ts"; +} from "../../auto-recovery.ts"; +import { parseRoadmap, parsePlan } from "../../parsers-legacy.ts"; +import { parseTaskPlanFile, clearParseCache } from "../../files.ts"; +import { invalidateAllCaches } from "../../cache.ts"; +import { deriveState, invalidateStateCache } from "../../state.ts"; import { openDatabase, closeDatabase, insertMilestone, insertSlice, insertTask, -} from "../gsd-db.ts"; -import { renderPlanFromDb } from "../markdown-renderer.ts"; +} from "../../gsd-db.ts"; +import { renderPlanFromDb } from "../../markdown-renderer.ts"; function makeTmpBase(): string { const base = join(tmpdir(), `gsd-test-${randomUUID()}`); diff --git a/src/resources/extensions/gsd/tests/auto-secrets-gate.test.ts b/src/resources/extensions/gsd/tests/integration/auto-secrets-gate.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/auto-secrets-gate.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-secrets-gate.test.ts index 1c970123d..6807647cf 100644 --- a/src/resources/extensions/gsd/tests/auto-secrets-gate.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-secrets-gate.test.ts @@ -16,8 +16,8 @@ import assert from 'node:assert/strict'; import { mkdirSync, writeFileSync, readFileSync, rmSync } from 'node:fs'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { getManifestStatus } from '../files.ts'; -import { collectSecretsFromManifest } from '../../get-secrets-from-user.ts'; +import { getManifestStatus } from '../../files.ts'; +import { collectSecretsFromManifest } from '../../../get-secrets-from-user.ts'; function makeTempDir(prefix: string): string { const dir = join(tmpdir(), `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2)}`); diff --git a/src/resources/extensions/gsd/tests/auto-stash-merge.test.ts b/src/resources/extensions/gsd/tests/integration/auto-stash-merge.test.ts similarity index 95% rename from src/resources/extensions/gsd/tests/auto-stash-merge.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-stash-merge.test.ts index 5152ba930..71c9173fd 100644 --- a/src/resources/extensions/gsd/tests/auto-stash-merge.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-stash-merge.test.ts @@ -12,8 +12,8 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execSync } from "node:child_process"; -import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts"; -import { nativeMergeSquash } from "../native-git-bridge.ts"; +import { createAutoWorktree, mergeMilestoneToMain } from "../../auto-worktree.ts"; +import { nativeMergeSquash } from "../../native-git-bridge.ts"; function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); @@ -88,7 +88,7 @@ test("#2151 bug 1: auto-stash unblocks merge when unrelated files are dirty", () }); test("#2151 bug 2: nativeMergeSquash returns dirty filenames", async () => { - const { nativeMergeSquash } = await import("../native-git-bridge.ts"); + const { nativeMergeSquash } = await import("../../native-git-bridge.ts"); const repo = createTempRepo(); try { run("git checkout -b milestone/M210", repo); diff --git a/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts b/src/resources/extensions/gsd/tests/integration/auto-worktree-milestone-merge.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-worktree-milestone-merge.test.ts index 87af75fa0..bf11a5109 100644 --- a/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-worktree-milestone-merge.test.ts @@ -21,9 +21,9 @@ import { createAutoWorktree, mergeMilestoneToMain, getAutoWorktreeOriginalBase, -} from "../auto-worktree.ts"; -import { getSliceBranchName } from "../worktree.ts"; -import { nativeMergeSquash } from "../native-git-bridge.ts"; +} from "../../auto-worktree.ts"; +import { getSliceBranchName } from "../../worktree.ts"; +import { nativeMergeSquash } from "../../native-git-bridge.ts"; function run(cmd: string, cwd: string): string { // Safe: all inputs are hardcoded test strings, not user input @@ -329,7 +329,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { }); test("#1738 bug 1: nativeMergeSquash detects dirty working tree", async () => { - const { nativeMergeSquash } = await import("../native-git-bridge.ts"); + const { nativeMergeSquash } = await import("../../native-git-bridge.ts"); const repo = freshRepo(); run("git checkout -b milestone/M070", repo); diff --git a/src/resources/extensions/gsd/tests/auto-worktree.test.ts b/src/resources/extensions/gsd/tests/integration/auto-worktree.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/auto-worktree.test.ts rename to src/resources/extensions/gsd/tests/integration/auto-worktree.test.ts index 3a524f0c3..38aa285b6 100644 --- a/src/resources/extensions/gsd/tests/auto-worktree.test.ts +++ b/src/resources/extensions/gsd/tests/integration/auto-worktree.test.ts @@ -20,7 +20,7 @@ import { enterAutoWorktree, getAutoWorktreeOriginalBase, getActiveAutoWorktreeContext, -} from "../auto-worktree.ts"; +} from "../../auto-worktree.ts"; // Note: execSync is used intentionally in tests for git operations with // controlled, hardcoded inputs (no user input). This is safe and matches @@ -150,7 +150,7 @@ describe("auto-worktree lifecycle", () => { run("git commit -m \"add milestone\"", tempDir); // Import createWorktree directly for manual worktree - const { createWorktree } = await import("../worktree-manager.ts"); + const { createWorktree } = await import("../../worktree-manager.ts"); // Create manual worktree (uses worktree/ branch) const manualWt = createWorktree(tempDir, "feature-x"); @@ -164,7 +164,7 @@ describe("auto-worktree lifecycle", () => { // Cleanup both teardownAutoWorktree(tempDir, "M003"); - const { removeWorktree } = await import("../worktree-manager.ts"); + const { removeWorktree } = await import("../../worktree-manager.ts"); removeWorktree(tempDir, "feature-x"); }); @@ -190,7 +190,7 @@ describe("auto-worktree lifecycle", () => { run("git add .", tempDir); run("git commit -m \"add milestone\"", tempDir); - const { GitServiceImpl } = await import("../git-service.ts"); + const { GitServiceImpl } = await import("../../git-service.ts"); // Create worktree const wtPath = createAutoWorktree(tempDir, "M005"); @@ -215,7 +215,7 @@ describe("auto-worktree lifecycle", () => { run("git commit -m \"add milestone\"", tempDir); // Simulate a crash leaving a stale directory with no .git file. - const { worktreePath } = await import("../worktree-manager.ts"); + const { worktreePath } = await import("../../worktree-manager.ts"); const staleDir = worktreePath(tempDir, "M010"); mkdirSync(staleDir, { recursive: true }); writeFileSync(join(staleDir, "orphan.txt"), "stale leftover\n"); diff --git a/src/resources/extensions/gsd/tests/continue-here.test.ts b/src/resources/extensions/gsd/tests/integration/continue-here.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/continue-here.test.ts rename to src/resources/extensions/gsd/tests/integration/continue-here.test.ts index ac28629fa..94f90aab8 100644 --- a/src/resources/extensions/gsd/tests/continue-here.test.ts +++ b/src/resources/extensions/gsd/tests/integration/continue-here.test.ts @@ -12,7 +12,7 @@ import { describe, it } from "node:test"; import assert from "node:assert/strict"; -import { computeBudgets } from "../context-budget.js"; +import { computeBudgets } from "../../context-budget.js"; // ─── Pure threshold / pipeline tests ────────────────────────────────────────── // These test the budget engine outputs that the continue-here monitor relies on. @@ -164,7 +164,7 @@ describe("continue-here", () => { describe("continueHereFired runtime record field", () => { it("AutoUnitRuntimeRecord includes continueHereFired with default false", async (t) => { // Import writeUnitRuntimeRecord to verify the field is present and defaults - const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../unit-runtime.js"); + const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../../unit-runtime.js"); const fs = await import("node:fs"); const path = await import("node:path"); const os = await import("node:os"); @@ -202,7 +202,7 @@ describe("continue-here", () => { describe("context-pressure monitor integration", () => { it("should fire wrap-up when context >= threshold and mark continueHereFired", async (t) => { - const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../unit-runtime.js"); + const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../../unit-runtime.js"); const fs = await import("node:fs"); const path = await import("node:path"); const os = await import("node:os"); diff --git a/src/resources/extensions/gsd/tests/doctor-completion-deferral.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-completion-deferral.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/doctor-completion-deferral.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-completion-deferral.test.ts index 35623e2e3..809562d10 100644 --- a/src/resources/extensions/gsd/tests/doctor-completion-deferral.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-completion-deferral.test.ts @@ -10,7 +10,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import test from "node:test"; import assert from "node:assert/strict"; -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; function makeTmp(name: string): string { const dir = join(tmpdir(), `doctor-deferral-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`); diff --git a/src/resources/extensions/gsd/tests/doctor-delimiter-fix.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-delimiter-fix.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/doctor-delimiter-fix.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-delimiter-fix.test.ts index 47b75723a..4a042990a 100644 --- a/src/resources/extensions/gsd/tests/doctor-delimiter-fix.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-delimiter-fix.test.ts @@ -10,7 +10,7 @@ import assert from "node:assert/strict"; import { mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; -import { runGSDDoctor } from "../doctor.js"; +import { runGSDDoctor } from "../../doctor.js"; test("doctor fix=true sanitizes em-dash in milestone title", async (t) => { const tmpBase = mkdtempSync(join(tmpdir(), "gsd-doctor-delim-")); diff --git a/src/resources/extensions/gsd/tests/doctor-enhancements.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-enhancements.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/doctor-enhancements.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-enhancements.test.ts index 352664afe..ba8734f30 100644 --- a/src/resources/extensions/gsd/tests/doctor-enhancements.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-enhancements.test.ts @@ -4,8 +4,8 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, existsSync } from "node: import { join } from "node:path"; import { tmpdir } from "node:os"; -import { runGSDDoctor } from "../doctor.js"; -import { formatDoctorReportJson } from "../doctor-format.js"; +import { runGSDDoctor } from "../../doctor.js"; +import { formatDoctorReportJson } from "../../doctor-format.js"; // ── Helpers ───────────────────────────────────────────────────────────────── function makeBase(): { base: string; gsd: string; mDir: string } { @@ -230,7 +230,7 @@ describe('doctor-enhancements', async () => { const historyPath = join(gsd, "doctor-history.jsonl"); assert.ok(existsSync(historyPath), "doctor-history.jsonl is created after run"); - const { readDoctorHistory } = await import("../doctor.js"); + const { readDoctorHistory } = await import("../../doctor.js"); const history = await readDoctorHistory(base); assert.ok(history.length >= 1, "history has at least one entry"); assert.ok(typeof history[0]?.ts === "string", "history entry has ts field"); diff --git a/src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-environment-worktree.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-environment-worktree.test.ts index 702e4ee6a..fe3ea7614 100644 --- a/src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-environment-worktree.test.ts @@ -20,7 +20,7 @@ import { runEnvironmentChecks, environmentResultsToDoctorIssues, checkEnvironmentHealth, -} from "../doctor-environment.ts"; +} from "../../doctor-environment.ts"; /** Create a directory tree with files. */ function createDir(files: Record = {}): string { const dir = mkdtempSync(join(tmpdir(), "gsd-wt-env-")); diff --git a/src/resources/extensions/gsd/tests/doctor-environment.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-environment.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor-environment.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-environment.test.ts index af55c2f66..99fa35363 100644 --- a/src/resources/extensions/gsd/tests/doctor-environment.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-environment.test.ts @@ -26,7 +26,7 @@ import { formatEnvironmentReport, checkEnvironmentHealth, type EnvironmentCheckResult, -} from "../doctor-environment.ts"; +} from "../../doctor-environment.ts"; function createProjectDir(files: Record = {}): string { const dir = mkdtempSync(join(tmpdir(), "gsd-env-test-")); for (const [name, content] of Object.entries(files)) { diff --git a/src/resources/extensions/gsd/tests/doctor-fixlevel.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-fixlevel.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/doctor-fixlevel.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-fixlevel.test.ts index a1d5a4aba..7b43459c6 100644 --- a/src/resources/extensions/gsd/tests/doctor-fixlevel.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-fixlevel.test.ts @@ -14,8 +14,8 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import test from "node:test"; import assert from "node:assert/strict"; -import { runGSDDoctor } from "../doctor.ts"; -import { closeDatabase } from "../gsd-db.ts"; +import { runGSDDoctor } from "../../doctor.ts"; +import { closeDatabase } from "../../gsd-db.ts"; function makeTmp(name: string): string { const dir = join(tmpdir(), `doctor-fixlevel-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`); diff --git a/src/resources/extensions/gsd/tests/doctor-git.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-git.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor-git.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-git.test.ts index 9b87d2714..d307627a3 100644 --- a/src/resources/extensions/gsd/tests/doctor-git.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-git.test.ts @@ -15,7 +15,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execSync } from "node:child_process"; -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } diff --git a/src/resources/extensions/gsd/tests/doctor-proactive.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-proactive.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor-proactive.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-proactive.test.ts index 29be69b33..af04680ca 100644 --- a/src/resources/extensions/gsd/tests/doctor-proactive.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-proactive.test.ts @@ -23,7 +23,7 @@ import { checkHealEscalation, resetProactiveHealing, formatHealthSummary, -} from "../doctor-proactive.ts"; +} from "../../doctor-proactive.ts"; function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } diff --git a/src/resources/extensions/gsd/tests/doctor-roadmap-summary-atomicity.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-roadmap-summary-atomicity.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/doctor-roadmap-summary-atomicity.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-roadmap-summary-atomicity.test.ts index 140db7f0c..40dc6ffd9 100644 --- a/src/resources/extensions/gsd/tests/doctor-roadmap-summary-atomicity.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-roadmap-summary-atomicity.test.ts @@ -12,7 +12,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import test from "node:test"; import assert from "node:assert/strict"; -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; function makeTmp(name: string): string { const dir = join(tmpdir(), `doctor-roadmap-summary-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`); diff --git a/src/resources/extensions/gsd/tests/doctor-runtime.test.ts b/src/resources/extensions/gsd/tests/integration/doctor-runtime.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor-runtime.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor-runtime.test.ts index a8f560cf6..8d55fd621 100644 --- a/src/resources/extensions/gsd/tests/doctor-runtime.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor-runtime.test.ts @@ -14,7 +14,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execSync } from "node:child_process"; -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; function run(cmd: string, cwd: string): string { return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } diff --git a/src/resources/extensions/gsd/tests/doctor.test.ts b/src/resources/extensions/gsd/tests/integration/doctor.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/doctor.test.ts rename to src/resources/extensions/gsd/tests/integration/doctor.test.ts index e9a33c28d..7eb482c85 100644 --- a/src/resources/extensions/gsd/tests/doctor.test.ts +++ b/src/resources/extensions/gsd/tests/integration/doctor.test.ts @@ -4,7 +4,7 @@ import { mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync, existsSync import { join } from "node:path"; import { tmpdir } from "node:os"; -import { formatDoctorReport, runGSDDoctor, summarizeDoctorIssues, filterDoctorIssues, selectDoctorScope, validateTitle } from "../doctor.js"; +import { formatDoctorReport, runGSDDoctor, summarizeDoctorIssues, filterDoctorIssues, selectDoctorScope, validateTitle } from "../../doctor.js"; const tmpBase = mkdtempSync(join(tmpdir(), "gsd-doctor-test-")); const gsd = join(tmpBase, ".gsd"); const mDir = join(gsd, "milestones", "M001"); diff --git a/src/resources/extensions/gsd/tests/e2e-workflow-pipeline-integration.test.ts b/src/resources/extensions/gsd/tests/integration/e2e-workflow-pipeline-integration.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/e2e-workflow-pipeline-integration.test.ts rename to src/resources/extensions/gsd/tests/integration/e2e-workflow-pipeline-integration.test.ts index 419ac5762..4b3ae61be 100644 --- a/src/resources/extensions/gsd/tests/e2e-workflow-pipeline-integration.test.ts +++ b/src/resources/extensions/gsd/tests/integration/e2e-workflow-pipeline-integration.test.ts @@ -34,11 +34,11 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { stringify, parse } from "yaml"; -import { CustomWorkflowEngine } from "../custom-workflow-engine.ts"; -import { CustomExecutionPolicy } from "../custom-execution-policy.ts"; -import { createRun, listRuns } from "../run-manager.ts"; -import { readGraph, writeGraph } from "../graph.ts"; -import { validateDefinition } from "../definition-loader.ts"; +import { CustomWorkflowEngine } from "../../custom-workflow-engine.ts"; +import { CustomExecutionPolicy } from "../../custom-execution-policy.ts"; +import { createRun, listRuns } from "../../run-manager.ts"; +import { readGraph, writeGraph } from "../../graph.ts"; +import { validateDefinition } from "../../definition-loader.ts"; // ─── Helpers ───────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/feature-branch-lifecycle-integration.test.ts b/src/resources/extensions/gsd/tests/integration/feature-branch-lifecycle-integration.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/feature-branch-lifecycle-integration.test.ts rename to src/resources/extensions/gsd/tests/integration/feature-branch-lifecycle-integration.test.ts index 6794a6ea9..e6cb849a8 100644 --- a/src/resources/extensions/gsd/tests/feature-branch-lifecycle-integration.test.ts +++ b/src/resources/extensions/gsd/tests/integration/feature-branch-lifecycle-integration.test.ts @@ -26,10 +26,10 @@ import { createAutoWorktree, mergeMilestoneToMain, autoWorktreeBranch, -} from "../auto-worktree.ts"; -import { captureIntegrationBranch, getSliceBranchName } from "../worktree.ts"; -import { writeIntegrationBranch, readIntegrationBranch } from "../git-service.ts"; -import { nextMilestoneId, generateMilestoneSuffix } from "../guided-flow.ts"; +} from "../../auto-worktree.ts"; +import { captureIntegrationBranch, getSliceBranchName } from "../../worktree.ts"; +import { writeIntegrationBranch, readIntegrationBranch } from "../../git-service.ts"; +import { nextMilestoneId, generateMilestoneSuffix } from "../../guided-flow.ts"; // ─── Helpers ──────────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/git-locale.test.ts b/src/resources/extensions/gsd/tests/integration/git-locale.test.ts similarity index 93% rename from src/resources/extensions/gsd/tests/git-locale.test.ts rename to src/resources/extensions/gsd/tests/integration/git-locale.test.ts index ef668e1de..e385ea287 100644 --- a/src/resources/extensions/gsd/tests/git-locale.test.ts +++ b/src/resources/extensions/gsd/tests/integration/git-locale.test.ts @@ -12,9 +12,9 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execFileSync } from "node:child_process"; -import { GIT_NO_PROMPT_ENV } from "../git-constants.ts"; -import { nativeAddAllWithExclusions } from "../native-git-bridge.ts"; -import { RUNTIME_EXCLUSION_PATHS } from "../git-service.ts"; +import { GIT_NO_PROMPT_ENV } from "../../git-constants.ts"; +import { nativeAddAllWithExclusions } from "../../native-git-bridge.ts"; +import { RUNTIME_EXCLUSION_PATHS } from "../../git-service.ts"; function git(cwd: string, ...args: string[]): string { return execFileSync("git", args, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } @@ -101,7 +101,7 @@ describe('git-locale', async () => { // We verify indirectly: the source code must pass env: GIT_NO_PROMPT_ENV. // Read the source and check for the pattern. This is a static check. const src = readFileSync( - join(import.meta.dirname, "..", "native-git-bridge.ts"), + join(import.meta.dirname, "../..", "native-git-bridge.ts"), "utf-8" ); diff --git a/src/resources/extensions/gsd/tests/git-self-heal.test.ts b/src/resources/extensions/gsd/tests/integration/git-self-heal.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/git-self-heal.test.ts rename to src/resources/extensions/gsd/tests/integration/git-self-heal.test.ts index 58bf81d59..092cde31c 100644 --- a/src/resources/extensions/gsd/tests/git-self-heal.test.ts +++ b/src/resources/extensions/gsd/tests/integration/git-self-heal.test.ts @@ -14,7 +14,7 @@ import assert from "node:assert/strict"; import { abortAndReset, formatGitError, -} from "../git-self-heal.js"; +} from "../../git-self-heal.js"; // ─── Helpers ───────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/git-service.test.ts b/src/resources/extensions/gsd/tests/integration/git-service.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/git-service.test.ts rename to src/resources/extensions/gsd/tests/integration/git-service.test.ts index 2a5587d9b..d1ba7a7ff 100644 --- a/src/resources/extensions/gsd/tests/git-service.test.ts +++ b/src/resources/extensions/gsd/tests/integration/git-service.test.ts @@ -20,8 +20,8 @@ import { type CommitOptions, type PreMergeCheckResult, type TaskCommitContext, -} from "../git-service.ts"; -import { nativeAddAllWithExclusions } from "../native-git-bridge.ts"; +} from "../../git-service.ts"; +import { nativeAddAllWithExclusions } from "../../native-git-bridge.ts"; function run(command: string, cwd: string): string { return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); } @@ -1113,7 +1113,7 @@ describe('git-service', async () => { // ─── untrackRuntimeFiles: removes tracked runtime files from index ─── test('untrackRuntimeFiles', async () => { - const { untrackRuntimeFiles } = await import("../gitignore.ts"); + const { untrackRuntimeFiles } = await import("../../gitignore.ts"); const repo = mkdtempSync(join(tmpdir(), "gsd-untrack-")); runGit(repo, ["init", "-b", "main"]); runGit(repo, ["config", "user.email", "test@test.com"]); @@ -1222,7 +1222,7 @@ describe('git-service', async () => { // ─── ensureGitignore: always adds .gsd to gitignore ────────────────── test('ensureGitignore: adds .gsd entry', async () => { - const { ensureGitignore } = await import("../gitignore.ts"); + const { ensureGitignore } = await import("../../gitignore.ts"); const repo = mkdtempSync(join(tmpdir(), "gsd-gitignore-external-state-")); // Should add .gsd to gitignore (external state dir is a symlink) diff --git a/src/resources/extensions/gsd/tests/gitignore-tracked-gsd.test.ts b/src/resources/extensions/gsd/tests/integration/gitignore-tracked-gsd.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/gitignore-tracked-gsd.test.ts rename to src/resources/extensions/gsd/tests/integration/gitignore-tracked-gsd.test.ts index b73512e3d..ed0d56b5f 100644 --- a/src/resources/extensions/gsd/tests/gitignore-tracked-gsd.test.ts +++ b/src/resources/extensions/gsd/tests/integration/gitignore-tracked-gsd.test.ts @@ -22,8 +22,8 @@ import { import { join } from "node:path"; import { tmpdir } from "node:os"; -import { ensureGitignore, hasGitTrackedGsdFiles } from "../gitignore.ts"; -import { migrateToExternalState } from "../migrate-external.ts"; +import { ensureGitignore, hasGitTrackedGsdFiles } from "../../gitignore.ts"; +import { migrateToExternalState } from "../../migrate-external.ts"; // ─── Helpers ───────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/idle-recovery.test.ts b/src/resources/extensions/gsd/tests/integration/idle-recovery.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/idle-recovery.test.ts rename to src/resources/extensions/gsd/tests/integration/idle-recovery.test.ts index f8940dc61..a2369e4d8 100644 --- a/src/resources/extensions/gsd/tests/idle-recovery.test.ts +++ b/src/resources/extensions/gsd/tests/integration/idle-recovery.test.ts @@ -7,7 +7,7 @@ import { writeBlockerPlaceholder, verifyExpectedArtifact, buildLoopRemediationSteps, -} from "../auto-recovery.ts"; +} from "../../auto-recovery.ts"; import { describe, test, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; @@ -299,7 +299,7 @@ test('writeBlockerPlaceholder: updates DB task status for execute-task (#2531)', const base = createFixtureBase(); try { const { openDatabase, closeDatabase, insertMilestone, insertSlice, insertTask, getTask, isDbAvailable } = - await import("../gsd-db.ts"); + await import("../../gsd-db.ts"); const dbPath = join(base, ".gsd", "gsd.db"); // Create the tasks directory (required for artifact path resolution) @@ -334,7 +334,7 @@ test('writeBlockerPlaceholder: does NOT update DB for non-execute-task types', a const base = createFixtureBase(); try { const { openDatabase, closeDatabase, insertMilestone, insertSlice, getSlice, isDbAvailable } = - await import("../gsd-db.ts"); + await import("../../gsd-db.ts"); const dbPath = join(base, ".gsd", "gsd.db"); mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true }); diff --git a/src/resources/extensions/gsd/tests/inherited-repo-home-dir.test.ts b/src/resources/extensions/gsd/tests/integration/inherited-repo-home-dir.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/inherited-repo-home-dir.test.ts rename to src/resources/extensions/gsd/tests/integration/inherited-repo-home-dir.test.ts index 297a5d61c..44e6e7aeb 100644 --- a/src/resources/extensions/gsd/tests/inherited-repo-home-dir.test.ts +++ b/src/resources/extensions/gsd/tests/integration/inherited-repo-home-dir.test.ts @@ -24,7 +24,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execFileSync } from "node:child_process"; -import { isInheritedRepo } from "../repo-identity.ts"; +import { isInheritedRepo } from "../../repo-identity.ts"; function run(cmd: string, args: string[], cwd: string): string { return execFileSync(cmd, args, { diff --git a/src/resources/extensions/gsd/tests/integration-lifecycle.test.ts b/src/resources/extensions/gsd/tests/integration/integration-lifecycle.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/integration-lifecycle.test.ts rename to src/resources/extensions/gsd/tests/integration/integration-lifecycle.test.ts index 2cfa31ea8..453ffcbbc 100644 --- a/src/resources/extensions/gsd/tests/integration-lifecycle.test.ts +++ b/src/resources/extensions/gsd/tests/integration/integration-lifecycle.test.ts @@ -12,15 +12,15 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync, appendFile import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { openDatabase, closeDatabase, isDbAvailable, _getAdapter } from '../gsd-db.ts'; -import { migrateFromMarkdown, parseDecisionsTable } from '../md-importer.ts'; +import { openDatabase, closeDatabase, isDbAvailable, _getAdapter } from '../../gsd-db.ts'; +import { migrateFromMarkdown, parseDecisionsTable } from '../../md-importer.ts'; import { queryDecisions, queryRequirements, formatDecisionsForPrompt, formatRequirementsForPrompt, -} from '../context-store.ts'; -import { saveDecisionToDb, generateDecisionsMd } from '../db-writer.ts'; +} from '../../context-store.ts'; +import { saveDecisionToDb, generateDecisionsMd } from '../../db-writer.ts'; import { describe, test, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; diff --git a/src/resources/extensions/gsd/tests/integration-mixed-milestones.test.ts b/src/resources/extensions/gsd/tests/integration/integration-mixed-milestones.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/integration-mixed-milestones.test.ts rename to src/resources/extensions/gsd/tests/integration/integration-mixed-milestones.test.ts index 94d2d76b6..f640bb77d 100644 --- a/src/resources/extensions/gsd/tests/integration-mixed-milestones.test.ts +++ b/src/resources/extensions/gsd/tests/integration/integration-mixed-milestones.test.ts @@ -11,15 +11,15 @@ import { execSync } from 'node:child_process'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { deriveState } from '../state.ts'; -import { indexWorkspace } from '../workspace-index.ts'; -import { inlinePriorMilestoneSummary } from '../files.ts'; -import { getPriorSliceCompletionBlocker } from '../dispatch-guard.ts'; +import { deriveState } from '../../state.ts'; +import { indexWorkspace } from '../../workspace-index.ts'; +import { inlinePriorMilestoneSummary } from '../../files.ts'; +import { getPriorSliceCompletionBlocker } from '../../dispatch-guard.ts'; import { getSliceBranchName, parseSliceBranch, -} from '../worktree.ts'; -import { clearPathCache } from '../paths.ts'; +} from '../../worktree.ts'; +import { clearPathCache } from '../../paths.ts'; import { describe, test, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; diff --git a/src/resources/extensions/gsd/tests/integration-proof.test.ts b/src/resources/extensions/gsd/tests/integration/integration-proof.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/integration-proof.test.ts rename to src/resources/extensions/gsd/tests/integration/integration-proof.test.ts index cd48e5f3e..993389b56 100644 --- a/src/resources/extensions/gsd/tests/integration-proof.test.ts +++ b/src/resources/extensions/gsd/tests/integration/integration-proof.test.ts @@ -50,11 +50,11 @@ import { transaction, isDbAvailable, _getAdapter, -} from "../gsd-db.ts"; +} from "../../gsd-db.ts"; // ── Tool handlers ───────────────────────────────────────────────────────── -import { handleCompleteTask } from "../tools/complete-task.ts"; -import { handleCompleteSlice } from "../tools/complete-slice.ts"; +import { handleCompleteTask } from "../../tools/complete-task.ts"; +import { handleCompleteSlice } from "../../tools/complete-slice.ts"; // ── Markdown renderer ───────────────────────────────────────────────────── import { @@ -63,32 +63,32 @@ import { renderAllFromDb, detectStaleRenders, repairStaleRenders, -} from "../markdown-renderer.ts"; +} from "../../markdown-renderer.ts"; // ── State derivation ────────────────────────────────────────────────────── import { deriveStateFromDb, _deriveStateImpl, invalidateStateCache, -} from "../state.ts"; +} from "../../state.ts"; // ── Auto-migration ─────────────────────────────────────────────────────── import { migrateHierarchyToDb, migrateFromMarkdown, -} from "../md-importer.ts"; +} from "../../md-importer.ts"; // ── Post-unit diagnostics ───────────────────────────────────────────────── -import { detectRogueFileWrites } from "../auto-post-unit.ts"; +import { detectRogueFileWrites } from "../../auto-post-unit.ts"; // ── Doctor ──────────────────────────────────────────────────────────────── -import { runGSDDoctor } from "../doctor.ts"; +import { runGSDDoctor } from "../../doctor.ts"; // ── Undo/reset ──────────────────────────────────────────────────────────── -import { handleUndoTask, handleResetSlice } from "../undo.ts"; +import { handleUndoTask, handleResetSlice } from "../../undo.ts"; // ── Cache invalidation ─────────────────────────────────────────────────── -import { invalidateAllCaches } from "../cache.ts"; +import { invalidateAllCaches } from "../../cache.ts"; // ═══════════════════════════════════════════════════════════════════════════ // Helpers @@ -400,7 +400,7 @@ test("full lifecycle: migration through completion through doctor", async (t) => writeFileSync(join(rogueDir, "T99-SUMMARY.md"), "# Rogue Summary\n", "utf-8"); // Clear path cache so resolveTaskFile sees the newly written file - const { clearPathCache } = await import("../paths.ts"); + const { clearPathCache } = await import("../../paths.ts"); clearPathCache(); const rogues = detectRogueFileWrites("execute-task", "M001/S01/T99", base); @@ -458,7 +458,7 @@ test("recovery: DB loss → migrateFromMarkdown restores state, stale render det assert.equal(existsSync(dbPath), false, "DB file should be deleted"); // Clear path caches so gsdRoot re-probes after DB deletion - const { clearPathCache: clearPaths } = await import("../paths.ts"); + const { clearPathCache: clearPaths } = await import("../../paths.ts"); clearPaths(); invalidateAllCaches(); diff --git a/src/resources/extensions/gsd/tests/migrate-command.test.ts b/src/resources/extensions/gsd/tests/integration/migrate-command.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/migrate-command.test.ts rename to src/resources/extensions/gsd/tests/integration/migrate-command.test.ts index 52473ed66..5ecc17b0e 100644 --- a/src/resources/extensions/gsd/tests/migrate-command.test.ts +++ b/src/resources/extensions/gsd/tests/integration/migrate-command.test.ts @@ -13,8 +13,8 @@ import { transformToGSD, generatePreview, writeGSDDirectory, -} from '../migrate/index.ts'; -import { deriveState } from '../state.ts'; +} from '../../migrate/index.ts'; +import { deriveState } from '../../state.ts'; import { describe, test, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; diff --git a/src/resources/extensions/gsd/tests/milestone-transition-worktree.test.ts b/src/resources/extensions/gsd/tests/integration/milestone-transition-worktree.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/milestone-transition-worktree.test.ts rename to src/resources/extensions/gsd/tests/integration/milestone-transition-worktree.test.ts index aaeed23d0..a283a6a8c 100644 --- a/src/resources/extensions/gsd/tests/milestone-transition-worktree.test.ts +++ b/src/resources/extensions/gsd/tests/integration/milestone-transition-worktree.test.ts @@ -24,7 +24,7 @@ import { isInAutoWorktree, getAutoWorktreeOriginalBase, mergeMilestoneToMain, -} from "../auto-worktree.ts"; +} from "../../auto-worktree.ts"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -124,7 +124,7 @@ test("worktree swap on milestone transition: merge old, create new", () => { test("auto/phases.ts milestone transition block contains worktree lifecycle", () => { const phasesSrc = readFileSync( - join(__dirname, "..", "auto", "phases.ts"), + join(__dirname, "../..", "auto", "phases.ts"), "utf-8", ); @@ -147,7 +147,7 @@ test("auto/phases.ts milestone transition block contains worktree lifecycle", () test("worktree-resolver mergeAndExit preserves branch when roadmap is missing (#1573)", () => { const resolverSrc = readFileSync( - join(__dirname, "..", "worktree-resolver.ts"), + join(__dirname, "../..", "worktree-resolver.ts"), "utf-8", ); diff --git a/src/resources/extensions/gsd/tests/parallel-merge.test.ts b/src/resources/extensions/gsd/tests/integration/parallel-merge.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/parallel-merge.test.ts rename to src/resources/extensions/gsd/tests/integration/parallel-merge.test.ts index ec943e0a8..038f40f44 100644 --- a/src/resources/extensions/gsd/tests/parallel-merge.test.ts +++ b/src/resources/extensions/gsd/tests/integration/parallel-merge.test.ts @@ -32,12 +32,12 @@ import { mergeAllCompleted, formatMergeResults, type MergeResult, -} from "../parallel-merge.ts"; -import type { WorkerInfo } from "../parallel-orchestrator.ts"; +} from "../../parallel-merge.ts"; +import type { WorkerInfo } from "../../parallel-orchestrator.ts"; import { writeSessionStatus, readSessionStatus, -} from "../session-status-io.ts"; +} from "../../session-status-io.ts"; // ─── Helpers ────────────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/parallel-workers-multi-milestone-e2e.test.ts b/src/resources/extensions/gsd/tests/integration/parallel-workers-multi-milestone-e2e.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/parallel-workers-multi-milestone-e2e.test.ts rename to src/resources/extensions/gsd/tests/integration/parallel-workers-multi-milestone-e2e.test.ts index ae4eccf62..9dc67279e 100644 --- a/src/resources/extensions/gsd/tests/parallel-workers-multi-milestone-e2e.test.ts +++ b/src/resources/extensions/gsd/tests/integration/parallel-workers-multi-milestone-e2e.test.ts @@ -26,12 +26,12 @@ import { getWorkerBatches, hasActiveWorkers, resetWorkerRegistry, -} from '../../subagent/worker-registry.ts'; +} from '../../../subagent/worker-registry.ts'; import { getBudgetAlertLevel, getNewBudgetAlertLevel, getBudgetEnforcementAction, -} from '../auto-budget.ts'; +} from '../../auto-budget.ts'; import { type UnitMetrics, type MetricsLedger, @@ -42,7 +42,7 @@ import { formatCostProjection, getAverageCostPerUnitType, predictRemainingCost, -} from '../metrics.ts'; +} from '../../metrics.ts'; // ─── Fixture helpers ────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/paths.test.ts b/src/resources/extensions/gsd/tests/integration/paths.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/paths.test.ts rename to src/resources/extensions/gsd/tests/integration/paths.test.ts index 4ffdeaed9..64c186a15 100644 --- a/src/resources/extensions/gsd/tests/paths.test.ts +++ b/src/resources/extensions/gsd/tests/integration/paths.test.ts @@ -5,7 +5,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { spawnSync } from "node:child_process"; -import { gsdRoot, _clearGsdRootCache } from "../paths.ts"; +import { gsdRoot, _clearGsdRootCache } from "../../paths.ts"; /** Create a tmp dir and resolve symlinks + 8.3 short names (macOS /var→/private/var, Windows RUNNER~1→runneradmin). */ function tmp(): string { const p = mkdtempSync(join(tmpdir(), "gsd-paths-test-")); diff --git a/src/resources/extensions/gsd/tests/plugin-importer-live.test.ts b/src/resources/extensions/gsd/tests/integration/plugin-importer-live.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/plugin-importer-live.test.ts rename to src/resources/extensions/gsd/tests/integration/plugin-importer-live.test.ts index 6971a6209..7288ac4a7 100644 --- a/src/resources/extensions/gsd/tests/plugin-importer-live.test.ts +++ b/src/resources/extensions/gsd/tests/integration/plugin-importer-live.test.ts @@ -11,8 +11,8 @@ import { describe, it, before, after } from 'node:test'; import assert from 'node:assert'; -import { PluginImporter, type DiscoveryResult, type ImportManifest } from '../plugin-importer.js'; -import { getMarketplaceFixtures } from './marketplace-test-fixtures.js'; +import { PluginImporter, type DiscoveryResult, type ImportManifest } from '../../plugin-importer.js'; +import { getMarketplaceFixtures } from '../marketplace-test-fixtures.ts'; // ============================================================================ // Live Test Configuration diff --git a/src/resources/extensions/gsd/tests/queue-completed-milestone-perf.test.ts b/src/resources/extensions/gsd/tests/integration/queue-completed-milestone-perf.test.ts similarity index 96% rename from src/resources/extensions/gsd/tests/queue-completed-milestone-perf.test.ts rename to src/resources/extensions/gsd/tests/integration/queue-completed-milestone-perf.test.ts index 75c1e871a..0f88cf69d 100644 --- a/src/resources/extensions/gsd/tests/queue-completed-milestone-perf.test.ts +++ b/src/resources/extensions/gsd/tests/integration/queue-completed-milestone-perf.test.ts @@ -15,9 +15,9 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; -import { buildExistingMilestonesContext } from "../guided-flow-queue.ts"; -import type { GSDState, MilestoneRegistryEntry } from "../types.ts"; -import { createTestContext } from "./test-helpers.ts"; +import { buildExistingMilestonesContext } from "../../guided-flow-queue.ts"; +import type { GSDState, MilestoneRegistryEntry } from "../../types.ts"; +import { createTestContext } from "../test-helpers.ts"; const { assertTrue, assertEq, report } = createTestContext(); diff --git a/src/resources/extensions/gsd/tests/queue-reorder-e2e.test.ts b/src/resources/extensions/gsd/tests/integration/queue-reorder-e2e.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/queue-reorder-e2e.test.ts rename to src/resources/extensions/gsd/tests/integration/queue-reorder-e2e.test.ts index f74105f47..f479673a5 100644 --- a/src/resources/extensions/gsd/tests/queue-reorder-e2e.test.ts +++ b/src/resources/extensions/gsd/tests/integration/queue-reorder-e2e.test.ts @@ -17,10 +17,10 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync, existsSync import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { deriveState, invalidateStateCache } from '../state.ts'; -import { findMilestoneIds } from '../guided-flow.ts'; -import { saveQueueOrder, loadQueueOrder } from '../queue-order.ts'; -import { parseContextDependsOn } from '../files.ts'; +import { deriveState, invalidateStateCache } from '../../state.ts'; +import { findMilestoneIds } from '../../guided-flow.ts'; +import { saveQueueOrder, loadQueueOrder } from '../../queue-order.ts'; +import { parseContextDependsOn } from '../../files.ts'; // ─── Fixture Helpers ─────────────────────────────────────────────────────── function createFixtureBase(): string { @@ -298,7 +298,7 @@ test('E2E: DB-backed path respects queue order (#2556)', async () => { // the dispatch guard (which respects queue order) blocked completion. const base = createFixtureBase(); try { - const { openDatabase, closeDatabase, insertMilestone, isDbAvailable } = await import('../gsd-db.ts'); + const { openDatabase, closeDatabase, insertMilestone, isDbAvailable } = await import('../../gsd-db.ts'); const dbPath = join(base, '.gsd', 'gsd.db'); // Create milestone directories (required for findMilestoneIds) diff --git a/src/resources/extensions/gsd/tests/quick-branch-lifecycle.test.ts b/src/resources/extensions/gsd/tests/integration/quick-branch-lifecycle.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/quick-branch-lifecycle.test.ts rename to src/resources/extensions/gsd/tests/integration/quick-branch-lifecycle.test.ts index f707ff902..a4d77703b 100644 --- a/src/resources/extensions/gsd/tests/quick-branch-lifecycle.test.ts +++ b/src/resources/extensions/gsd/tests/integration/quick-branch-lifecycle.test.ts @@ -14,8 +14,8 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { execSync } from "node:child_process"; -import { captureIntegrationBranch, getCurrentBranch } from "../worktree.ts"; -import { readIntegrationBranch, QUICK_BRANCH_RE } from "../git-service.ts"; +import { captureIntegrationBranch, getCurrentBranch } from "../../worktree.ts"; +import { readIntegrationBranch, QUICK_BRANCH_RE } from "../../git-service.ts"; function run(command: string, cwd: string): string { return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); @@ -139,7 +139,7 @@ test('cleanupQuickBranch: merges back and cleans up (same session)', async () => // Import and call cleanupQuickBranch // Use dynamic import to get a fresh module scope — the in-memory state // won't be set, so it will fall through to disk recovery - const { cleanupQuickBranch } = await import("../quick.ts"); + const { cleanupQuickBranch } = await import("../../quick.ts"); const result = cleanupQuickBranch(); assert.ok(result, "cleanupQuickBranch returns true"); @@ -187,7 +187,7 @@ test('cleanupQuickBranch: recovers from disk state (cross-session)', async () => process.chdir(repo); - const { cleanupQuickBranch } = await import("../quick.ts"); + const { cleanupQuickBranch } = await import("../../quick.ts"); const result = cleanupQuickBranch(); assert.ok(result, "cross-session recovery returns true"); @@ -207,7 +207,7 @@ test('cleanupQuickBranch: no-op without pending state', async () => { const origCwd = process.cwd(); process.chdir(repo); - const { cleanupQuickBranch } = await import("../quick.ts"); + const { cleanupQuickBranch } = await import("../../quick.ts"); const result = cleanupQuickBranch(); assert.ok(!result, "returns false when no pending state"); diff --git a/src/resources/extensions/gsd/tests/run-uat.test.ts b/src/resources/extensions/gsd/tests/integration/run-uat.test.ts similarity index 98% rename from src/resources/extensions/gsd/tests/run-uat.test.ts rename to src/resources/extensions/gsd/tests/integration/run-uat.test.ts index 89c8307bd..cf9d44f74 100644 --- a/src/resources/extensions/gsd/tests/run-uat.test.ts +++ b/src/resources/extensions/gsd/tests/integration/run-uat.test.ts @@ -5,12 +5,12 @@ import { join, dirname } from 'node:path'; import { tmpdir } from 'node:os'; import { fileURLToPath } from 'node:url'; -import { extractUatType } from '../files.ts'; -import { resolveSliceFile } from '../paths.ts'; -import { checkNeedsRunUat } from '../auto-prompts.ts'; +import { extractUatType } from '../../files.ts'; +import { resolveSliceFile } from '../../paths.ts'; +import { checkNeedsRunUat } from '../../auto-prompts.ts'; const __dirname = dirname(fileURLToPath(import.meta.url)); -const worktreePromptsDir = join(__dirname, '..', 'prompts'); +const worktreePromptsDir = join(__dirname, '../..', 'prompts'); function loadPromptFromWorktree(name: string, vars: Record = {}): string { const path = join(worktreePromptsDir, `${name}.md`); diff --git a/src/resources/extensions/gsd/tests/token-savings.test.ts b/src/resources/extensions/gsd/tests/integration/token-savings.test.ts similarity index 99% rename from src/resources/extensions/gsd/tests/token-savings.test.ts rename to src/resources/extensions/gsd/tests/integration/token-savings.test.ts index a8bf5e669..708c1a787 100644 --- a/src/resources/extensions/gsd/tests/token-savings.test.ts +++ b/src/resources/extensions/gsd/tests/integration/token-savings.test.ts @@ -10,14 +10,14 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync } from 'nod import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { openDatabase, closeDatabase } from '../gsd-db.ts'; -import { migrateFromMarkdown } from '../md-importer.ts'; +import { openDatabase, closeDatabase } from '../../gsd-db.ts'; +import { migrateFromMarkdown } from '../../md-importer.ts'; import { queryDecisions, queryRequirements, formatDecisionsForPrompt, formatRequirementsForPrompt, -} from '../context-store.ts'; +} from '../../context-store.ts'; import { test } from 'node:test'; import assert from 'node:assert/strict'; diff --git a/src/resources/extensions/gsd/tests/worktree-e2e.test.ts b/src/resources/extensions/gsd/tests/integration/worktree-e2e.test.ts similarity index 97% rename from src/resources/extensions/gsd/tests/worktree-e2e.test.ts rename to src/resources/extensions/gsd/tests/integration/worktree-e2e.test.ts index 43bd272a1..fdca0640b 100644 --- a/src/resources/extensions/gsd/tests/worktree-e2e.test.ts +++ b/src/resources/extensions/gsd/tests/integration/worktree-e2e.test.ts @@ -18,10 +18,10 @@ import { execSync } from "node:child_process"; import { createAutoWorktree, mergeMilestoneToMain, -} from "../auto-worktree.ts"; -import { getSliceBranchName } from "../worktree.ts"; -import { abortAndReset } from "../git-self-heal.ts"; -import { runGSDDoctor } from "../doctor.ts"; +} from "../../auto-worktree.ts"; +import { getSliceBranchName } from "../../worktree.ts"; +import { abortAndReset } from "../../git-self-heal.ts"; +import { runGSDDoctor } from "../../doctor.ts"; import { describe, test } from 'node:test'; import assert from 'node:assert/strict'; diff --git a/src/resources/extensions/gsd/visualizer-overlay.ts b/src/resources/extensions/gsd/visualizer-overlay.ts index 196b2f8ec..68c41d81a 100644 --- a/src/resources/extensions/gsd/visualizer-overlay.ts +++ b/src/resources/extensions/gsd/visualizer-overlay.ts @@ -14,7 +14,10 @@ import { renderHealthView, type ProgressFilter, } from "./visualizer-views.js"; +import { writeFileSync, mkdirSync } from "node:fs"; +import { join } from "node:path"; import { writeExportFile } from "./export.js"; +import { gsdRoot } from "./paths.js"; import { stripAnsi } from "../shared/mod.js"; const TAB_COUNT = 10; @@ -350,9 +353,6 @@ export class GSDVisualizerOverlay { // Capture current active tab's rendered lines as snapshot const snapshotLines = this.renderTabContent(this.activeTab, 80); const timestamp = new Date().toISOString().replace(/[:.]/g, "-").slice(0, 19); - const { writeFileSync, mkdirSync } = require("node:fs"); - const { join } = require("node:path"); - const { gsdRoot } = require("./paths.js"); const exportDir = gsdRoot(this.basePath); mkdirSync(exportDir, { recursive: true }); const outPath = join(exportDir, `snapshot-${timestamp}.txt`); diff --git a/src/resources/extensions/shared/format-utils.ts b/src/resources/extensions/shared/format-utils.ts index 122d122bd..226cb4cac 100644 --- a/src/resources/extensions/shared/format-utils.ts +++ b/src/resources/extensions/shared/format-utils.ts @@ -11,7 +11,7 @@ /** Format a millisecond duration as a compact human-readable string. */ export function formatDuration(ms: number): string { - if (ms < 1000) return `${ms}ms`; + if (ms > 0 && ms < 1000) return `${ms}ms`; const s = Math.floor(ms / 1000); if (s < 60) return `${s}s`; const m = Math.floor(s / 60); diff --git a/src/resources/extensions/subagent/worker-registry.ts b/src/resources/extensions/subagent/worker-registry.ts index ac52e9289..1f6cb90e2 100644 --- a/src/resources/extensions/subagent/worker-registry.ts +++ b/src/resources/extensions/subagent/worker-registry.ts @@ -54,9 +54,10 @@ export function updateWorker(id: string, status: "completed" | "failed"): void { if (entry) { entry.status = status; // Remove after a brief display window (5 seconds) + // unref() so the timer doesn't keep the process alive in test environments setTimeout(() => { activeWorkers.delete(id); - }, 5000); + }, 5000).unref(); } } diff --git a/src/tests/docker-template.test.ts b/src/tests/docker-template.test.ts index dc01b3551..5fe53b556 100644 --- a/src/tests/docker-template.test.ts +++ b/src/tests/docker-template.test.ts @@ -1,11 +1,9 @@ import test from "node:test"; import assert from "node:assert/strict"; import { readFileSync, existsSync } from "node:fs"; -import { resolve, dirname } from "node:path"; -import { fileURLToPath } from "node:url"; +import { resolve } from "node:path"; -const __dirname = dirname(fileURLToPath(import.meta.url)); -const root = resolve(__dirname, "../.."); +const root = process.cwd(); function readFile(relativePath: string): string { const full = resolve(root, relativePath); diff --git a/src/tests/ensure-workspace-builds.test.ts b/src/tests/ensure-workspace-builds.test.ts new file mode 100644 index 000000000..f256c7afe --- /dev/null +++ b/src/tests/ensure-workspace-builds.test.ts @@ -0,0 +1,64 @@ +import { describe, it, beforeEach, afterEach } from "node:test"; +import assert from "node:assert/strict"; +import { mkdtempSync, writeFileSync, mkdirSync, rmSync, utimesSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { createRequire } from "node:module"; + +const require = createRequire(import.meta.url); +const { newestSrcMtime } = require("../../scripts/ensure-workspace-builds.cjs"); + +describe("newestSrcMtime", () => { + let tmp: string; + + beforeEach(() => { tmp = mkdtempSync(join(tmpdir(), "gsd-mtime-test-")); }); + afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); + + it("returns 0 for a non-existent directory", () => { + assert.equal(newestSrcMtime(join(tmp, "does-not-exist")), 0); + }); + + it("returns 0 when directory has no .ts files", () => { + writeFileSync(join(tmp, "index.js"), ""); + writeFileSync(join(tmp, "config.json"), ""); + assert.equal(newestSrcMtime(tmp), 0); + }); + + it("returns the mtime of a single .ts file", () => { + const file = join(tmp, "index.ts"); + writeFileSync(file, ""); + const mtime = new Date("2024-01-15T10:00:00Z"); + utimesSync(file, mtime, mtime); + assert.equal(newestSrcMtime(tmp), mtime.getTime()); + }); + + it("returns the max mtime across multiple .ts files", () => { + const older = join(tmp, "a.ts"); + const newer = join(tmp, "b.ts"); + writeFileSync(older, ""); + writeFileSync(newer, ""); + utimesSync(older, new Date("2024-01-01T00:00:00Z"), new Date("2024-01-01T00:00:00Z")); + utimesSync(newer, new Date("2024-06-01T00:00:00Z"), new Date("2024-06-01T00:00:00Z")); + assert.equal(newestSrcMtime(tmp), new Date("2024-06-01T00:00:00Z").getTime()); + }); + + it("recurses into subdirectories", () => { + const subdir = join(tmp, "nested", "deep"); + mkdirSync(subdir, { recursive: true }); + const file = join(subdir, "util.ts"); + writeFileSync(file, ""); + const mtime = new Date("2024-03-01T00:00:00Z"); + utimesSync(file, mtime, mtime); + assert.equal(newestSrcMtime(tmp), mtime.getTime()); + }); + + it("skips node_modules entirely", () => { + const nm = join(tmp, "node_modules", "some-pkg"); + mkdirSync(nm, { recursive: true }); + const nmFile = join(nm, "index.ts"); + writeFileSync(nmFile, ""); + const future = new Date("2099-01-01T00:00:00Z"); + utimesSync(nmFile, future, future); + assert.equal(newestSrcMtime(tmp), 0); + }); +}); diff --git a/src/tests/ci_monitor.test.ts b/src/tests/integration/ci_monitor.test.ts similarity index 98% rename from src/tests/ci_monitor.test.ts rename to src/tests/integration/ci_monitor.test.ts index 745df409f..90449ddbf 100644 --- a/src/tests/ci_monitor.test.ts +++ b/src/tests/integration/ci_monitor.test.ts @@ -13,7 +13,7 @@ import { join, dirname } from 'node:path'; import { fileURLToPath } from 'node:url'; const __dirname = dirname(fileURLToPath(import.meta.url)); -const ROOT = join(__dirname, '..', '..'); +const ROOT = join(__dirname, '..', '..', '..'); const SCRIPT_PATH = join(ROOT, 'scripts', 'ci_monitor.cjs'); let passed = 0; diff --git a/src/tests/web-auth-token.test.ts b/src/tests/integration/web-auth-token.test.ts similarity index 100% rename from src/tests/web-auth-token.test.ts rename to src/tests/integration/web-auth-token.test.ts diff --git a/src/tests/web-boot-node24.test.ts b/src/tests/integration/web-boot-node24.test.ts similarity index 98% rename from src/tests/web-boot-node24.test.ts rename to src/tests/integration/web-boot-node24.test.ts index dd587aefa..8dda73414 100644 --- a/src/tests/web-boot-node24.test.ts +++ b/src/tests/integration/web-boot-node24.test.ts @@ -1,7 +1,7 @@ import test from "node:test" import assert from "node:assert/strict" -import { resolveTypeStrippingFlag } from "../web/ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag } from "../../web/ts-subprocess-flags.ts" // --------------------------------------------------------------------------- // Bug 1 — resolveTypeStrippingFlag selects the correct flag diff --git a/src/tests/web-bridge-contract.test.ts b/src/tests/integration/web-bridge-contract.test.ts similarity index 98% rename from src/tests/web-bridge-contract.test.ts rename to src/tests/integration/web-bridge-contract.test.ts index 1e8218526..3de7fd6f6 100644 --- a/src/tests/web-bridge-contract.test.ts +++ b/src/tests/integration/web-bridge-contract.test.ts @@ -8,12 +8,12 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); -const onboarding = await import("../web/onboarding-service.ts"); +const bridge = await import("../../web/bridge-service.ts"); +const onboarding = await import("../../web/onboarding-service.ts"); const { AuthStorage } = await import("@gsd/pi-coding-agent"); -const bootRoute = await import("../../web/app/api/boot/route.ts"); -const commandRoute = await import("../../web/app/api/session/command/route.ts"); -const eventsRoute = await import("../../web/app/api/session/events/route.ts"); +const bootRoute = await import("../../../web/app/api/boot/route.ts"); +const commandRoute = await import("../../../web/app/api/session/command/route.ts"); +const eventsRoute = await import("../../../web/app/api/session/events/route.ts"); class FakeRpcChild extends EventEmitter { stdin = new PassThrough(); diff --git a/src/tests/web-bridge-package-root.test.ts b/src/tests/integration/web-bridge-package-root.test.ts similarity index 97% rename from src/tests/web-bridge-package-root.test.ts rename to src/tests/integration/web-bridge-package-root.test.ts index f919ce873..8ccab075c 100644 --- a/src/tests/web-bridge-package-root.test.ts +++ b/src/tests/integration/web-bridge-package-root.test.ts @@ -14,7 +14,7 @@ import test from "node:test"; import assert from "node:assert/strict"; import { resolve } from "node:path"; -const bridge = await import("../web/bridge-service.ts"); +const bridge = await import("../../web/bridge-service.ts"); test("resolveBridgeRuntimeConfig uses GSD_WEB_PACKAGE_ROOT when set", () => { const env = { diff --git a/src/tests/web-bridge-terminal-contract.test.ts b/src/tests/integration/web-bridge-terminal-contract.test.ts similarity index 97% rename from src/tests/web-bridge-terminal-contract.test.ts rename to src/tests/integration/web-bridge-terminal-contract.test.ts index af604cace..3104c5329 100644 --- a/src/tests/web-bridge-terminal-contract.test.ts +++ b/src/tests/integration/web-bridge-terminal-contract.test.ts @@ -8,10 +8,10 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); -const streamRoute = await import("../../web/app/api/bridge-terminal/stream/route.ts"); -const inputRoute = await import("../../web/app/api/bridge-terminal/input/route.ts"); -const resizeRoute = await import("../../web/app/api/bridge-terminal/resize/route.ts"); +const bridge = await import("../../web/bridge-service.ts"); +const streamRoute = await import("../../../web/app/api/bridge-terminal/stream/route.ts"); +const inputRoute = await import("../../../web/app/api/bridge-terminal/input/route.ts"); +const resizeRoute = await import("../../../web/app/api/bridge-terminal/resize/route.ts"); class FakeRpcChild extends EventEmitter { stdin = new PassThrough(); diff --git a/src/tests/web-cli-entry.test.ts b/src/tests/integration/web-cli-entry.test.ts similarity index 97% rename from src/tests/web-cli-entry.test.ts rename to src/tests/integration/web-cli-entry.test.ts index 022431168..6c69928a0 100644 --- a/src/tests/web-cli-entry.test.ts +++ b/src/tests/integration/web-cli-entry.test.ts @@ -5,7 +5,7 @@ import { join } from "node:path"; import { tmpdir } from "node:os"; import { pathToFileURL } from "node:url"; -const { resolveGsdCliEntry } = await import("../web/cli-entry.ts"); +const { resolveGsdCliEntry } = await import("../../web/cli-entry.ts"); function makeFixture(paths: string[]): string { const root = mkdtempSync(join(tmpdir(), "gsd-cli-entry-")); diff --git a/src/tests/web-command-parity-contract.test.ts b/src/tests/integration/web-command-parity-contract.test.ts similarity index 98% rename from src/tests/web-command-parity-contract.test.ts rename to src/tests/integration/web-command-parity-contract.test.ts index 2858f131b..96b6e2640 100644 --- a/src/tests/web-command-parity-contract.test.ts +++ b/src/tests/integration/web-command-parity-contract.test.ts @@ -3,19 +3,19 @@ import assert from "node:assert/strict" import { readFileSync } from "node:fs" import { resolve } from "node:path" -const { BUILTIN_SLASH_COMMANDS } = await import("../../packages/pi-coding-agent/src/core/slash-commands.ts") +const { BUILTIN_SLASH_COMMANDS } = await import("../../../packages/pi-coding-agent/src/core/slash-commands.ts") const { dispatchBrowserSlashCommand, getBrowserSlashCommandTerminalNotice, -} = await import("../../web/lib/browser-slash-command-dispatch.ts") +} = await import("../../../web/lib/browser-slash-command-dispatch.ts") const { applyCommandSurfaceActionResult, createInitialCommandSurfaceState, openCommandSurfaceState, setCommandSurfacePending, surfaceOutcomeToOpenRequest, -} = await import("../../web/lib/command-surface-contract.ts") -const gsdExtension = await import("../resources/extensions/gsd/index.ts") +} = await import("../../../web/lib/command-surface-contract.ts") +const gsdExtension = await import("../../resources/extensions/gsd/index.ts") const EXPECTED_BUILTIN_OUTCOMES = new Map([ ["settings", "surface"], @@ -680,7 +680,7 @@ test("surface action state keeps compaction summaries inspectable", () => { }) test("command-surface session affordances use the shared store action path", () => { - const commandSurfacePath = resolve(import.meta.dirname, "../../web/components/gsd/command-surface.tsx") + const commandSurfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx") const commandSurfaceSource = readFileSync(commandSurfacePath, "utf-8") assert.match( diff --git a/src/tests/web-continuity-contract.test.ts b/src/tests/integration/web-continuity-contract.test.ts similarity index 100% rename from src/tests/web-continuity-contract.test.ts rename to src/tests/integration/web-continuity-contract.test.ts diff --git a/src/tests/web-dashboard-rtk-contract.test.ts b/src/tests/integration/web-dashboard-rtk-contract.test.ts similarity index 100% rename from src/tests/web-dashboard-rtk-contract.test.ts rename to src/tests/integration/web-dashboard-rtk-contract.test.ts diff --git a/src/tests/web-diagnostics-contract.test.ts b/src/tests/integration/web-diagnostics-contract.test.ts similarity index 98% rename from src/tests/web-diagnostics-contract.test.ts rename to src/tests/integration/web-diagnostics-contract.test.ts index ede1e68dd..eb698f3ca 100644 --- a/src/tests/web-diagnostics-contract.test.ts +++ b/src/tests/integration/web-diagnostics-contract.test.ts @@ -25,18 +25,18 @@ import type { SkillHealthReport, SkillHealthEntry, SkillHealSuggestion, -} from "../../web/lib/diagnostics-types.ts" +} from "../../../web/lib/diagnostics-types.ts" const { createInitialCommandSurfaceState, commandSurfaceSectionForRequest, -} = await import("../../web/lib/command-surface-contract.ts") +} = await import("../../../web/lib/command-surface-contract.ts") const { dispatchBrowserSlashCommand, -} = await import("../../web/lib/browser-slash-command-dispatch.ts") +} = await import("../../../web/lib/browser-slash-command-dispatch.ts") -const { GSDWorkspaceStore } = await import("../../web/lib/gsd-workspace-store.tsx") +const { GSDWorkspaceStore } = await import("../../../web/lib/gsd-workspace-store.tsx") // ─── Block 1: Type exports (R103, R104, R105) ─────────────────────────────── diff --git a/src/tests/web-live-interaction-contract.test.ts b/src/tests/integration/web-live-interaction-contract.test.ts similarity index 99% rename from src/tests/web-live-interaction-contract.test.ts rename to src/tests/integration/web-live-interaction-contract.test.ts index 4418abb63..5e288b69f 100644 --- a/src/tests/web-live-interaction-contract.test.ts +++ b/src/tests/integration/web-live-interaction-contract.test.ts @@ -8,11 +8,11 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); -const onboarding = await import("../web/onboarding-service.ts"); +const bridge = await import("../../web/bridge-service.ts"); +const onboarding = await import("../../web/onboarding-service.ts"); const { AuthStorage } = await import("@gsd/pi-coding-agent"); -const commandRoute = await import("../../web/app/api/session/command/route.ts"); -const eventsRoute = await import("../../web/app/api/session/events/route.ts"); +const commandRoute = await import("../../../web/app/api/session/command/route.ts"); +const eventsRoute = await import("../../../web/app/api/session/events/route.ts"); // --------------------------------------------------------------------------- // Test infrastructure (reused from web-bridge-contract.test.ts) diff --git a/src/tests/web-live-state-contract.test.ts b/src/tests/integration/web-live-state-contract.test.ts similarity index 97% rename from src/tests/web-live-state-contract.test.ts rename to src/tests/integration/web-live-state-contract.test.ts index c2b1f7ecc..2af24bcc6 100644 --- a/src/tests/web-live-state-contract.test.ts +++ b/src/tests/integration/web-live-state-contract.test.ts @@ -8,13 +8,13 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); -const onboarding = await import("../web/onboarding-service.ts"); +const bridge = await import("../../web/bridge-service.ts"); +const onboarding = await import("../../web/onboarding-service.ts"); const { AuthStorage } = await import("@gsd/pi-coding-agent"); -const commandRoute = await import("../../web/app/api/session/command/route.ts"); -const manageRoute = await import("../../web/app/api/session/manage/route.ts"); -const eventsRoute = await import("../../web/app/api/session/events/route.ts"); -const liveStateRoute = await import("../../web/app/api/live-state/route.ts"); +const commandRoute = await import("../../../web/app/api/session/command/route.ts"); +const manageRoute = await import("../../../web/app/api/session/manage/route.ts"); +const eventsRoute = await import("../../../web/app/api/session/events/route.ts"); +const liveStateRoute = await import("../../../web/app/api/live-state/route.ts"); class FakeRpcChild extends EventEmitter { stdin = new PassThrough(); diff --git a/src/tests/web-mode-cli.test.ts b/src/tests/integration/web-mode-cli.test.ts similarity index 99% rename from src/tests/web-mode-cli.test.ts rename to src/tests/integration/web-mode-cli.test.ts index c1e0ffe6f..249e17568 100644 --- a/src/tests/web-mode-cli.test.ts +++ b/src/tests/integration/web-mode-cli.test.ts @@ -6,8 +6,8 @@ import { tmpdir } from 'node:os' const projectRoot = process.cwd() -const cliWeb = await import('../cli-web-branch.ts') -const webMode = await import('../web-mode.ts') +const cliWeb = await import('../../cli-web-branch.ts') +const webMode = await import('../../web-mode.ts') test('parseCliArgs recognizes --web explicitly', () => { const flags = cliWeb.parseCliArgs(['node', 'dist/loader.js', '--web']) diff --git a/src/tests/web-mode-network-flags.test.ts b/src/tests/integration/web-mode-network-flags.test.ts similarity index 98% rename from src/tests/web-mode-network-flags.test.ts rename to src/tests/integration/web-mode-network-flags.test.ts index 29a57f542..7fb82fd56 100644 --- a/src/tests/web-mode-network-flags.test.ts +++ b/src/tests/integration/web-mode-network-flags.test.ts @@ -4,8 +4,8 @@ import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs' import { join } from 'node:path' import { tmpdir } from 'node:os' -const cliWeb = await import('../cli-web-branch.ts') -const webMode = await import('../web-mode.ts') +const cliWeb = await import('../../cli-web-branch.ts') +const webMode = await import('../../web-mode.ts') // ─── CLI flag parsing ──────────────────────────────────────────────── diff --git a/src/tests/web-multi-project-contract.test.ts b/src/tests/integration/web-multi-project-contract.test.ts similarity index 99% rename from src/tests/web-multi-project-contract.test.ts rename to src/tests/integration/web-multi-project-contract.test.ts index e3dc12660..4fa31c0ea 100644 --- a/src/tests/web-multi-project-contract.test.ts +++ b/src/tests/integration/web-multi-project-contract.test.ts @@ -8,7 +8,7 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); +const bridge = await import("../../web/bridge-service.ts"); // --------------------------------------------------------------------------- // Helpers (same shape as web-bridge-contract.test.ts) diff --git a/src/tests/web-onboarding-contract.test.ts b/src/tests/integration/web-onboarding-contract.test.ts similarity index 98% rename from src/tests/web-onboarding-contract.test.ts rename to src/tests/integration/web-onboarding-contract.test.ts index aedb3e1ce..3ed833368 100644 --- a/src/tests/web-onboarding-contract.test.ts +++ b/src/tests/integration/web-onboarding-contract.test.ts @@ -8,11 +8,11 @@ import { PassThrough } from "node:stream"; import { StringDecoder } from "node:string_decoder"; const repoRoot = process.cwd(); -const bridge = await import("../web/bridge-service.ts"); -const onboarding = await import("../web/onboarding-service.ts"); -const bootRoute = await import("../../web/app/api/boot/route.ts"); -const onboardingRoute = await import("../../web/app/api/onboarding/route.ts"); -const commandRoute = await import("../../web/app/api/session/command/route.ts"); +const bridge = await import("../../web/bridge-service.ts"); +const onboarding = await import("../../web/onboarding-service.ts"); +const bootRoute = await import("../../../web/app/api/boot/route.ts"); +const onboardingRoute = await import("../../../web/app/api/onboarding/route.ts"); +const commandRoute = await import("../../../web/app/api/session/command/route.ts"); const { AuthStorage } = await import("@gsd/pi-coding-agent"); const ONBOARDING_ENV_KEYS = [ diff --git a/src/tests/web-onboarding-presentation.test.ts b/src/tests/integration/web-onboarding-presentation.test.ts similarity index 97% rename from src/tests/web-onboarding-presentation.test.ts rename to src/tests/integration/web-onboarding-presentation.test.ts index f74a0ff59..8cb297c2b 100644 --- a/src/tests/web-onboarding-presentation.test.ts +++ b/src/tests/integration/web-onboarding-presentation.test.ts @@ -1,7 +1,7 @@ import test from "node:test" import assert from "node:assert/strict" -const { getOnboardingPresentation } = await import("../../web/lib/gsd-workspace-store.tsx") +const { getOnboardingPresentation } = await import("../../../web/lib/gsd-workspace-store.tsx") function makeOnboardingState(overrides: Record = {}) { return { diff --git a/src/tests/web-project-discovery-contract.test.ts b/src/tests/integration/web-project-discovery-contract.test.ts similarity index 98% rename from src/tests/web-project-discovery-contract.test.ts rename to src/tests/integration/web-project-discovery-contract.test.ts index cd2c52fdd..51ca44f93 100644 --- a/src/tests/web-project-discovery-contract.test.ts +++ b/src/tests/integration/web-project-discovery-contract.test.ts @@ -4,8 +4,9 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; import { tmpdir } from "node:os"; import { basename, join } from "node:path"; -import { discoverProjects } from "../web/project-discovery-service.ts"; -import { detectMonorepo } from "../web/bridge-service.ts"; + +import { discoverProjects } from "../../web/project-discovery-service.ts"; +import { detectMonorepo } from "../../web/bridge-service.ts"; // --------------------------------------------------------------------------- // Fixture setup — standard multi-project root diff --git a/src/tests/web-project-url.test.ts b/src/tests/integration/web-project-url.test.ts similarity index 97% rename from src/tests/web-project-url.test.ts rename to src/tests/integration/web-project-url.test.ts index 350b94354..bbe9f918c 100644 --- a/src/tests/web-project-url.test.ts +++ b/src/tests/integration/web-project-url.test.ts @@ -1,7 +1,7 @@ import test from "node:test" import assert from "node:assert/strict" -import { buildProjectAbsoluteUrl, buildProjectPath } from "../../web/lib/project-url.ts" +import { buildProjectAbsoluteUrl, buildProjectPath } from "../../../web/lib/project-url.ts" test("buildProjectPath leaves non-project routes unchanged", () => { assert.equal(buildProjectPath("/api/terminal/input"), "/api/terminal/input") diff --git a/src/tests/web-recovery-diagnostics-contract.test.ts b/src/tests/integration/web-recovery-diagnostics-contract.test.ts similarity index 98% rename from src/tests/web-recovery-diagnostics-contract.test.ts rename to src/tests/integration/web-recovery-diagnostics-contract.test.ts index f3b2de070..110d96e8c 100644 --- a/src/tests/web-recovery-diagnostics-contract.test.ts +++ b/src/tests/integration/web-recovery-diagnostics-contract.test.ts @@ -8,8 +8,8 @@ import { PassThrough } from "node:stream" import { StringDecoder } from "node:string_decoder" const repoRoot = process.cwd() -const bridge = await import("../web/bridge-service.ts") -const recoveryRoute = await import("../../web/app/api/recovery/route.ts") +const bridge = await import("../../web/bridge-service.ts") +const recoveryRoute = await import("../../../web/app/api/recovery/route.ts") class FakeRpcChild extends EventEmitter { stdin = new PassThrough() diff --git a/src/tests/web-responsive.test.ts b/src/tests/integration/web-responsive.test.ts similarity index 99% rename from src/tests/web-responsive.test.ts rename to src/tests/integration/web-responsive.test.ts index 847a7a5e2..f159103e7 100644 --- a/src/tests/web-responsive.test.ts +++ b/src/tests/integration/web-responsive.test.ts @@ -10,7 +10,7 @@ import assert from 'node:assert/strict' import { readFileSync } from 'node:fs' import { resolve } from 'node:path' -const WEB_ROOT = resolve(import.meta.dirname, '../../web') +const WEB_ROOT = resolve(import.meta.dirname, '../../../web') function readComponent(relativePath: string): string { return readFileSync(resolve(WEB_ROOT, relativePath), 'utf-8') diff --git a/src/tests/web-session-parity-contract.test.ts b/src/tests/integration/web-session-parity-contract.test.ts similarity index 97% rename from src/tests/web-session-parity-contract.test.ts rename to src/tests/integration/web-session-parity-contract.test.ts index 5b5fa628d..9e8b1afcf 100644 --- a/src/tests/web-session-parity-contract.test.ts +++ b/src/tests/integration/web-session-parity-contract.test.ts @@ -9,11 +9,11 @@ import { PassThrough } from "node:stream" import { StringDecoder } from "node:string_decoder" const repoRoot = process.cwd() -const bridge = await import("../web/bridge-service.ts") -const onboarding = await import("../web/onboarding-service.ts") -const browserRoute = await import("../../web/app/api/session/browser/route.ts") -const manageRoute = await import("../../web/app/api/session/manage/route.ts") -const gitRoute = await import("../../web/app/api/git/route.ts") +const bridge = await import("../../web/bridge-service.ts") +const onboarding = await import("../../web/onboarding-service.ts") +const browserRoute = await import("../../../web/app/api/session/browser/route.ts") +const manageRoute = await import("../../../web/app/api/session/manage/route.ts") +const gitRoute = await import("../../../web/app/api/git/route.ts") const { AuthStorage } = await import("@gsd/pi-coding-agent") class FakeRpcChild extends EventEmitter { @@ -635,12 +635,12 @@ test("/api/git exposes an explicit not-a-repo state instead of failing silently" }) test("browser session, settings, and git surfaces keep inspectable browse/manage/state markers on the shared surface", () => { - const rpcTypesSource = readFileSync(resolve(import.meta.dirname, "../../packages/pi-coding-agent/src/modes/rpc/rpc-types.ts"), "utf8") - const contractSource = readFileSync(resolve(import.meta.dirname, "../../web/lib/command-surface-contract.ts"), "utf8") - const storeSource = readFileSync(resolve(import.meta.dirname, "../../web/lib/gsd-workspace-store.tsx"), "utf8") - const surfaceSource = readFileSync(resolve(import.meta.dirname, "../../web/components/gsd/command-surface.tsx"), "utf8") - const sidebarSource = readFileSync(resolve(import.meta.dirname, "../../web/components/gsd/sidebar.tsx"), "utf8") - const gitRouteSource = readFileSync(resolve(import.meta.dirname, "../../web/app/api/git/route.ts"), "utf8") + const rpcTypesSource = readFileSync(resolve(import.meta.dirname, "../../../packages/pi-coding-agent/src/modes/rpc/rpc-types.ts"), "utf8") + const contractSource = readFileSync(resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"), "utf8") + const storeSource = readFileSync(resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"), "utf8") + const surfaceSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"), "utf8") + const sidebarSource = readFileSync(resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"), "utf8") + const gitRouteSource = readFileSync(resolve(import.meta.dirname, "../../../web/app/api/git/route.ts"), "utf8") assert.match(rpcTypesSource, /autoRetryEnabled: boolean/, "rpc-types.ts must expose retry-enabled state in get_state") assert.match(rpcTypesSource, /retryInProgress: boolean/, "rpc-types.ts must expose retry-in-progress state in get_state") diff --git a/src/tests/web-state-surfaces-contract.test.ts b/src/tests/integration/web-state-surfaces-contract.test.ts similarity index 90% rename from src/tests/web-state-surfaces-contract.test.ts rename to src/tests/integration/web-state-surfaces-contract.test.ts index d8fc6b556..58d9b89e9 100644 --- a/src/tests/web-state-surfaces-contract.test.ts +++ b/src/tests/integration/web-state-surfaces-contract.test.ts @@ -6,12 +6,12 @@ import { join, resolve } from "node:path"; // ─── Imports ────────────────────────────────────────────────────────── const workspaceIndex = await import( - "../resources/extensions/gsd/workspace-index.ts" + "../../resources/extensions/gsd/workspace-index.ts" ); -const filesRoute = await import("../../web/app/api/files/route.ts"); +const filesRoute = await import("../../../web/app/api/files/route.ts"); // Re-import status helpers from the web-side module -const workspaceStatus = await import("../../web/lib/workspace-status.ts"); +const workspaceStatus = await import("../../../web/lib/workspace-status.ts"); // ─── Helpers ────────────────────────────────────────────────────────── function makeGsdFixture(): { root: string; gsdDir: string; cleanup: () => void } { @@ -384,11 +384,11 @@ const MOCK_DATA_PATTERNS = [ /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z["'](?:.*,\s*$)/m, // hardcoded ISO timestamps in array literals ]; -const webRoot = resolve(import.meta.dirname, "../../web"); +const webRoot = resolve(import.meta.dirname, "../../../web"); test("view components contain no static mock data arrays", () => { for (const filePath of VIEW_FILES) { - const fullPath = resolve(import.meta.dirname, "../..", filePath); + const fullPath = resolve(import.meta.dirname, "../../..", filePath); const source = readFileSync(fullPath, "utf-8"); for (const pattern of MOCK_DATA_PATTERNS) { const match = source.match(pattern); @@ -416,7 +416,7 @@ test("view components read from real data sources (store or API)", () => { ]; for (const filePath of STORE_VIEWS) { - const fullPath = resolve(import.meta.dirname, "../..", filePath); + const fullPath = resolve(import.meta.dirname, "../../..", filePath); const source = readFileSync(fullPath, "utf-8"); assert.ok( source.includes("gsd-workspace-store"), @@ -425,7 +425,7 @@ test("view components read from real data sources (store or API)", () => { } for (const { path: filePath, apiPattern } of API_VIEWS) { - const fullPath = resolve(import.meta.dirname, "../..", filePath); + const fullPath = resolve(import.meta.dirname, "../../..", filePath); const source = readFileSync(fullPath, "utf-8"); assert.ok( source.includes(apiPattern), @@ -438,7 +438,7 @@ test("view components read from real data sources (store or API)", () => { // from the dashboard. Live signals are visible in the terminal/power mode instead. test("status bar consumes statusTexts from store", () => { - const statusBarPath = resolve(import.meta.dirname, "../../web/components/gsd/status-bar.tsx"); + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx"); const source = readFileSync(statusBarPath, "utf-8"); assert.ok( @@ -452,10 +452,10 @@ test("status bar consumes statusTexts from store", () => { }); test("browser shell renders title overrides, widgets, and editor prefills from store-backed state", () => { - const storePath = resolve(import.meta.dirname, "../../web/lib/gsd-workspace-store.tsx"); - const appShellPath = resolve(import.meta.dirname, "../../web/components/gsd/app-shell.tsx"); - const statusBarPath = resolve(import.meta.dirname, "../../web/components/gsd/status-bar.tsx"); - const terminalPath = resolve(import.meta.dirname, "../../web/components/gsd/terminal.tsx"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); + const appShellPath = resolve(import.meta.dirname, "../../../web/components/gsd/app-shell.tsx"); + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx"); + const terminalPath = resolve(import.meta.dirname, "../../../web/components/gsd/terminal.tsx"); const storeSource = readFileSync(storePath, "utf-8"); const appShellSource = readFileSync(appShellPath, "utf-8"); @@ -478,7 +478,7 @@ test("browser shell renders title overrides, widgets, and editor prefills from s }); test("terminal consumes activeToolExecution from store", () => { - const terminalPath = resolve(import.meta.dirname, "../../web/components/gsd/terminal.tsx"); + const terminalPath = resolve(import.meta.dirname, "../../../web/components/gsd/terminal.tsx"); const source = readFileSync(terminalPath, "utf-8"); assert.ok( @@ -488,12 +488,12 @@ test("terminal consumes activeToolExecution from store", () => { }); test("live browser panels consume live selectors and expose inspectable freshness markers", () => { - const contractPath = resolve(import.meta.dirname, "../../web/lib/command-surface-contract.ts") - const storePath = resolve(import.meta.dirname, "../../web/lib/gsd-workspace-store.tsx") - const dashboardPath = resolve(import.meta.dirname, "../../web/components/gsd/dashboard.tsx") - const sidebarPath = resolve(import.meta.dirname, "../../web/components/gsd/sidebar.tsx") - const roadmapPath = resolve(import.meta.dirname, "../../web/components/gsd/roadmap.tsx") - const statusBarPath = resolve(import.meta.dirname, "../../web/components/gsd/status-bar.tsx") + const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts") + const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx") + const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx") + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx") + const roadmapPath = resolve(import.meta.dirname, "../../../web/components/gsd/roadmap.tsx") + const statusBarPath = resolve(import.meta.dirname, "../../../web/components/gsd/status-bar.tsx") const contractSource = readFileSync(contractPath, "utf-8") const storeSource = readFileSync(storePath, "utf-8") @@ -528,9 +528,9 @@ test("live browser panels consume live selectors and expose inspectable freshnes }) test("workflow action surfaces route new-milestone CTAs through the shared command path", () => { - const dashboardPath = resolve(import.meta.dirname, "../../web/components/gsd/dashboard.tsx") - const sidebarPath = resolve(import.meta.dirname, "../../web/components/gsd/sidebar.tsx") - const chatPath = resolve(import.meta.dirname, "../../web/components/gsd/chat-mode.tsx") + const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx") + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx") + const chatPath = resolve(import.meta.dirname, "../../../web/components/gsd/chat-mode.tsx") const dashboardSource = readFileSync(dashboardPath, "utf-8") const sidebarSource = readFileSync(sidebarPath, "utf-8") @@ -549,10 +549,10 @@ test("workflow action surfaces route new-milestone CTAs through the shared comma }) test("sidebar Git affordance opens a real git-summary surface with visible repo/not-repo/error states", () => { - const contractPath = resolve(import.meta.dirname, "../../web/lib/command-surface-contract.ts"); - const storePath = resolve(import.meta.dirname, "../../web/lib/gsd-workspace-store.tsx"); - const surfacePath = resolve(import.meta.dirname, "../../web/components/gsd/command-surface.tsx"); - const sidebarPath = resolve(import.meta.dirname, "../../web/components/gsd/sidebar.tsx"); + const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); + const surfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"); + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"); const contractSource = readFileSync(contractPath, "utf-8"); const storeSource = readFileSync(storePath, "utf-8"); @@ -573,11 +573,11 @@ test("sidebar Git affordance opens a real git-summary surface with visible repo/ }); test("recovery diagnostics surface stays on a dedicated route with explicit stale and action state", () => { - const contractPath = resolve(import.meta.dirname, "../../web/lib/command-surface-contract.ts"); - const storePath = resolve(import.meta.dirname, "../../web/lib/gsd-workspace-store.tsx"); - const surfacePath = resolve(import.meta.dirname, "../../web/components/gsd/command-surface.tsx"); - const dashboardPath = resolve(import.meta.dirname, "../../web/components/gsd/dashboard.tsx"); - const sidebarPath = resolve(import.meta.dirname, "../../web/components/gsd/sidebar.tsx"); + const contractPath = resolve(import.meta.dirname, "../../../web/lib/command-surface-contract.ts"); + const storePath = resolve(import.meta.dirname, "../../../web/lib/gsd-workspace-store.tsx"); + const surfacePath = resolve(import.meta.dirname, "../../../web/components/gsd/command-surface.tsx"); + const dashboardPath = resolve(import.meta.dirname, "../../../web/components/gsd/dashboard.tsx"); + const sidebarPath = resolve(import.meta.dirname, "../../../web/components/gsd/sidebar.tsx"); const contractSource = readFileSync(contractPath, "utf-8"); const storeSource = readFileSync(storePath, "utf-8"); diff --git a/src/tests/web-subprocess-module-resolution.test.ts b/src/tests/integration/web-subprocess-module-resolution.test.ts similarity index 99% rename from src/tests/web-subprocess-module-resolution.test.ts rename to src/tests/integration/web-subprocess-module-resolution.test.ts index 3c10d8057..9010eb698 100644 --- a/src/tests/web-subprocess-module-resolution.test.ts +++ b/src/tests/integration/web-subprocess-module-resolution.test.ts @@ -5,7 +5,7 @@ import { join } from "node:path" import { isUnderNodeModules, resolveSubprocessModule, -} from "../web/ts-subprocess-flags.ts" +} from "../../web/ts-subprocess-flags.ts" // --------------------------------------------------------------------------- // isUnderNodeModules — exported utility diff --git a/src/tests/web-switch-project.test.ts b/src/tests/integration/web-switch-project.test.ts similarity index 100% rename from src/tests/web-switch-project.test.ts rename to src/tests/integration/web-switch-project.test.ts diff --git a/src/tests/web-terminal-allowlist.test.ts b/src/tests/integration/web-terminal-allowlist.test.ts similarity index 84% rename from src/tests/web-terminal-allowlist.test.ts rename to src/tests/integration/web-terminal-allowlist.test.ts index c1d36341c..eca747b3f 100644 --- a/src/tests/web-terminal-allowlist.test.ts +++ b/src/tests/integration/web-terminal-allowlist.test.ts @@ -1,8 +1,8 @@ import test from "node:test"; import assert from "node:assert/strict"; -const sessionsRoute = await import("../../web/app/api/terminal/sessions/route.ts"); -const streamRoute = await import("../../web/app/api/terminal/stream/route.ts"); +const sessionsRoute = await import("../../../web/app/api/terminal/sessions/route.ts"); +const streamRoute = await import("../../../web/app/api/terminal/stream/route.ts"); test("terminal session creation rejects disallowed commands", async () => { const response = await sessionsRoute.POST( diff --git a/src/tests/web-workflow-action-execution.test.ts b/src/tests/integration/web-workflow-action-execution.test.ts similarity index 97% rename from src/tests/web-workflow-action-execution.test.ts rename to src/tests/integration/web-workflow-action-execution.test.ts index 3cc052a39..024677baa 100644 --- a/src/tests/web-workflow-action-execution.test.ts +++ b/src/tests/integration/web-workflow-action-execution.test.ts @@ -5,7 +5,7 @@ const { derivePendingWorkflowCommandLabel, executeWorkflowActionInPowerMode, navigateToGSDView, -} = await import("../../web/lib/workflow-action-execution.ts") +} = await import("../../../web/lib/workflow-action-execution.ts") test("derivePendingWorkflowCommandLabel prefers the latest input line while a command is in flight", () => { const label = derivePendingWorkflowCommandLabel({ diff --git a/src/tests/web-workflow-controls-contract.test.ts b/src/tests/integration/web-workflow-controls-contract.test.ts similarity index 98% rename from src/tests/web-workflow-controls-contract.test.ts rename to src/tests/integration/web-workflow-controls-contract.test.ts index 7e91ca9cd..897245290 100644 --- a/src/tests/web-workflow-controls-contract.test.ts +++ b/src/tests/integration/web-workflow-controls-contract.test.ts @@ -2,7 +2,7 @@ import test from "node:test"; import assert from "node:assert/strict"; // ─── Import ────────────────────────────────────────────────────────── -const { deriveWorkflowAction } = await import("../../web/lib/workflow-actions.ts"); +const { deriveWorkflowAction } = await import("../../../web/lib/workflow-actions.ts"); // ─── Helpers ────────────────────────────────────────────────────────── function baseInput(overrides: Partial[0]> = {}) { From 142da7823a8e953ccbbbc04ad3889b4087ef5452 Mon Sep 17 00:00:00 2001 From: mastertyko <11311479+mastertyko@users.noreply.github.com> Date: Fri, 27 Mar 2026 21:52:30 +0100 Subject: [PATCH 24/27] fix(gsd): prefer PREFERENCES.md in worktrees (#2796) Keep auto-worktree sync and initial seeding aligned with the repo's canonical preferences filename while retaining the lowercase legacy fallback for older repos and case-sensitive filesystems. --- src/resources/extensions/gsd/auto-worktree.ts | 49 +++++++++--- .../tests/preferences-worktree-sync.test.ts | 80 ++++++++++++++----- .../tests/worktree-preferences-sync.test.ts | 73 +++++++++++------ 3 files changed, 146 insertions(+), 56 deletions(-) diff --git a/src/resources/extensions/gsd/auto-worktree.ts b/src/resources/extensions/gsd/auto-worktree.ts index 1e9e78eb2..e94c04655 100644 --- a/src/resources/extensions/gsd/auto-worktree.ts +++ b/src/resources/extensions/gsd/auto-worktree.ts @@ -65,6 +65,8 @@ import { } from "./native-git-bridge.js"; const gsdHome = process.env.GSD_HOME || join(homedir(), ".gsd"); +const PROJECT_PREFERENCES_FILE = "PREFERENCES.md"; +const LEGACY_PROJECT_PREFERENCES_FILE = "preferences.md"; // ─── Shared Constants & Helpers ───────────────────────────────────────────── @@ -82,7 +84,7 @@ const ROOT_STATE_FILES = [ "QUEUE.md", "completed-units.json", "metrics.json", - // NOTE: preferences.md is intentionally NOT in ROOT_STATE_FILES. + // NOTE: project preferences are intentionally NOT in ROOT_STATE_FILES. // Forward-sync (main → worktree) is handled explicitly in syncGsdStateToWorktree(). // Back-sync (worktree → main) must NEVER overwrite the project root's copy // because the project root is authoritative for preferences (#2684). @@ -449,18 +451,25 @@ export function syncGsdStateToWorktree( } } - // Forward-sync preferences.md from project root to worktree (additive only). - // NOT in ROOT_STATE_FILES because syncWorktreeStateBack() must never overwrite - // the project root's preferences — the project root is authoritative (#2684). + // Forward-sync project preferences from project root to worktree (additive only). + // Prefer the canonical uppercase file name, but keep the legacy lowercase + // fallback so older repos still work on case-sensitive filesystems. { - const src = join(mainGsd, "preferences.md"); - const dst = join(wtGsd, "preferences.md"); - if (existsSync(src) && !existsSync(dst)) { - try { - cpSync(src, dst); - synced.push("preferences.md"); - } catch { - /* non-fatal */ + const worktreeHasPreferences = existsSync(join(wtGsd, PROJECT_PREFERENCES_FILE)) + || existsSync(join(wtGsd, LEGACY_PROJECT_PREFERENCES_FILE)); + if (!worktreeHasPreferences) { + for (const file of [PROJECT_PREFERENCES_FILE, LEGACY_PROJECT_PREFERENCES_FILE] as const) { + const src = join(mainGsd, file); + const dst = join(wtGsd, file); + if (existsSync(src)) { + try { + cpSync(src, dst); + synced.push(file); + } catch { + /* non-fatal */ + } + break; + } } } } @@ -995,11 +1004,25 @@ function copyPlanningArtifacts(srcBase: string, wtPath: string): void { "STATE.md", "KNOWLEDGE.md", "OVERRIDES.md", - "preferences.md", ]) { safeCopy(join(srcGsd, file), join(dstGsd, file), { force: true }); } + // Seed canonical PREFERENCES.md when available; fall back to legacy lowercase. + if (existsSync(join(srcGsd, PROJECT_PREFERENCES_FILE))) { + safeCopy( + join(srcGsd, PROJECT_PREFERENCES_FILE), + join(dstGsd, PROJECT_PREFERENCES_FILE), + { force: true }, + ); + } else if (existsSync(join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE))) { + safeCopy( + join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE), + join(dstGsd, LEGACY_PROJECT_PREFERENCES_FILE), + { force: true }, + ); + } + // Shared WAL (R012): worktrees use the project root's DB directly. // No longer copy gsd.db into the worktree — the DB path resolver in // ensureDbOpen() detects the worktree location and opens the root DB. diff --git a/src/resources/extensions/gsd/tests/preferences-worktree-sync.test.ts b/src/resources/extensions/gsd/tests/preferences-worktree-sync.test.ts index 04a0fbd0f..c7f6828a6 100644 --- a/src/resources/extensions/gsd/tests/preferences-worktree-sync.test.ts +++ b/src/resources/extensions/gsd/tests/preferences-worktree-sync.test.ts @@ -1,17 +1,19 @@ /** - * Regression tests for #2684: preferences.md must be included in both - * ROOT_STATE_FILES (sync) and copyPlanningArtifacts (initial seed). + * Regression tests for #2684 plus uppercase-preference normalization: + * preferences files are handled explicitly + * outside ROOT_STATE_FILES and prefer canonical PREFERENCES.md over the + * legacy lowercase fallback. * * Without this, post_unit_hooks and all preference-driven config silently * stop working inside auto-mode worktrees. */ import { test } from "node:test"; import assert from "node:assert/strict"; -import { readFileSync, mkdtempSync, mkdirSync, writeFileSync, existsSync, rmSync } from "node:fs"; +import { readFileSync, mkdtempSync, mkdirSync, writeFileSync, existsSync, readdirSync, rmSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; -test("#2684: preferences.md is NOT in ROOT_STATE_FILES (forward-only sync)", () => { +test("#2684: preferences files are NOT in ROOT_STATE_FILES (forward-only sync)", () => { const srcPath = join(import.meta.dirname, "..", "auto-worktree.ts"); const src = readFileSync(srcPath, "utf-8"); @@ -22,21 +24,23 @@ test("#2684: preferences.md is NOT in ROOT_STATE_FILES (forward-only sync)", () const arrayEnd = src.indexOf("] as const", arrayStart); const block = src.slice(arrayStart, arrayEnd); - // preferences.md must NOT be in ROOT_STATE_FILES — it is handled separately + // Project preferences must NOT be in ROOT_STATE_FILES — they are handled separately // in syncGsdStateToWorktree() (forward-only, additive). Including it in // ROOT_STATE_FILES would cause syncWorktreeStateBack() to overwrite the // authoritative project root copy (#2684). const entries = block.split("\n") .map(l => l.trim()) .filter(l => l.startsWith('"') && l.includes(".md")); - const hasPrefs = entries.some(l => l.includes("preferences.md")); + const hasPrefs = entries.some( + l => l.includes("PREFERENCES.md") || l.includes("preferences.md"), + ); assert.ok( !hasPrefs, - "preferences.md must NOT be in ROOT_STATE_FILES (back-sync would overwrite root)", + "preferences files must NOT be in ROOT_STATE_FILES (back-sync would overwrite root)", ); }); -test("#2684: copyPlanningArtifacts file list includes preferences.md", () => { +test("copyPlanningArtifacts prefers canonical PREFERENCES.md with lowercase fallback", () => { const srcPath = join(import.meta.dirname, "..", "auto-worktree.ts"); const src = readFileSync(srcPath, "utf-8"); @@ -45,15 +49,15 @@ test("#2684: copyPlanningArtifacts file list includes preferences.md", () => { assert.ok(fnIdx !== -1, "copyPlanningArtifacts function exists"); // Extract function body (up to the next top-level function) - const fnBody = src.slice(fnIdx, fnIdx + 1500); + const fnBody = src.slice(fnIdx, fnIdx + 2200); assert.ok( - fnBody.includes('"preferences.md"'), - "preferences.md should be in copyPlanningArtifacts file list", + fnBody.includes("PROJECT_PREFERENCES_FILE") && fnBody.includes("LEGACY_PROJECT_PREFERENCES_FILE"), + "copyPlanningArtifacts should prefer canonical PREFERENCES.md and retain lowercase fallback via the shared constants", ); }); -test("#2684: syncGsdStateToWorktree copies preferences.md", async () => { +test("syncGsdStateToWorktree copies canonical PREFERENCES.md", async () => { // Functional test: create a mock source and destination, call the sync const srcBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-src-")); const dstBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-dst-")); @@ -63,9 +67,9 @@ test("#2684: syncGsdStateToWorktree copies preferences.md", async () => { mkdirSync(dstGsd, { recursive: true }); try { - // Write a preferences.md in source + // Write a canonical PREFERENCES.md in source writeFileSync( - join(srcGsd, "preferences.md"), + join(srcGsd, "PREFERENCES.md"), "---\nversion: 1\n---\n\npost_unit_hooks:\n - name: notify\n command: echo done\n", ); @@ -73,16 +77,54 @@ test("#2684: syncGsdStateToWorktree copies preferences.md", async () => { const { syncGsdStateToWorktree } = await import("../auto-worktree.ts"); syncGsdStateToWorktree(srcBase, dstBase); - // Verify preferences.md was copied + // Verify PREFERENCES.md was copied assert.ok( - existsSync(join(dstGsd, "preferences.md")), - "preferences.md should be copied to worktree", + existsSync(join(dstGsd, "PREFERENCES.md")), + "PREFERENCES.md should be copied to worktree", ); - const content = readFileSync(join(dstGsd, "preferences.md"), "utf-8"); + const content = readFileSync(join(dstGsd, "PREFERENCES.md"), "utf-8"); assert.ok( content.includes("post_unit_hooks"), - "copied preferences.md should contain the hooks config", + "copied PREFERENCES.md should contain the hooks config", + ); + } finally { + rmSync(srcBase, { recursive: true, force: true }); + rmSync(dstBase, { recursive: true, force: true }); + } +}); + +test("syncGsdStateToWorktree falls back to legacy lowercase preferences.md", async () => { + const srcBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-legacy-src-")); + const dstBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-legacy-dst-")); + const srcGsd = join(srcBase, ".gsd"); + const dstGsd = join(dstBase, ".gsd"); + mkdirSync(srcGsd, { recursive: true }); + mkdirSync(dstGsd, { recursive: true }); + + try { + writeFileSync( + join(srcGsd, "preferences.md"), + "---\nversion: 1\n---\n\ngit:\n auto_push: true\n", + ); + + const { syncGsdStateToWorktree } = await import("../auto-worktree.ts"); + const result = syncGsdStateToWorktree(srcBase, dstBase); + + const copiedEntries = readdirSync(dstGsd) + .filter((name) => name === "PREFERENCES.md" || name === "preferences.md"); + + assert.ok( + copiedEntries.length === 1, + `expected exactly one preferences file in worktree, got ${copiedEntries.join(", ") || "(none)"}`, + ); + assert.ok( + copiedEntries[0] === "PREFERENCES.md" || copiedEntries[0] === "preferences.md", + "legacy fallback should still result in one readable preferences file", + ); + assert.ok( + result.synced.includes("preferences.md") || result.synced.includes("PREFERENCES.md"), + "legacy fallback copy should be reported in synced list", ); } finally { rmSync(srcBase, { recursive: true, force: true }); diff --git a/src/resources/extensions/gsd/tests/worktree-preferences-sync.test.ts b/src/resources/extensions/gsd/tests/worktree-preferences-sync.test.ts index 950421c45..691d58827 100644 --- a/src/resources/extensions/gsd/tests/worktree-preferences-sync.test.ts +++ b/src/resources/extensions/gsd/tests/worktree-preferences-sync.test.ts @@ -1,11 +1,12 @@ /** * worktree-preferences-sync.test.ts — Regression test for #2684. * - * Verifies that preferences.md is seeded into auto-mode worktrees: + * Verifies that canonical PREFERENCES.md is seeded into auto-mode worktrees, + * while legacy lowercase preferences.md remains supported: * - * 1. copyPlanningArtifacts() copies preferences.md on initial worktree creation - * 2. syncGsdStateToWorktree() forward-syncs preferences.md (additive only) - * 3. syncWorktreeStateBack() does NOT overwrite project root preferences.md + * 1. syncGsdStateToWorktree() forward-syncs PREFERENCES.md (additive only) + * 2. syncGsdStateToWorktree() still accepts legacy lowercase preferences.md + * 3. syncWorktreeStateBack() does NOT overwrite project root PREFERENCES.md */ import test from "node:test"; @@ -15,6 +16,7 @@ import { mkdirSync, mkdtempSync, readFileSync, + readdirSync, rmSync, writeFileSync, } from "node:fs"; @@ -56,35 +58,58 @@ const PREFS_CONTENT = [ ' - use: "frontend-design"', ].join("\n"); -test("#2684: syncGsdStateToWorktree forward-syncs preferences.md when missing from worktree", (t) => { +test("#2684: syncGsdStateToWorktree forward-syncs PREFERENCES.md when missing from worktree", (t) => { const mainBase = makeTempDir("main"); const wtBase = makeTempDir("wt"); t.after(() => cleanup(mainBase, wtBase)); - // Project root has preferences.md - writeFile(mainBase, ".gsd/preferences.md", PREFS_CONTENT); + // Project root has canonical PREFERENCES.md + writeFile(mainBase, ".gsd/PREFERENCES.md", PREFS_CONTENT); - // Worktree has .gsd/ but no preferences.md + // Worktree has .gsd/ but no preferences file mkdirSync(join(wtBase, ".gsd"), { recursive: true }); const result = syncGsdStateToWorktree(mainBase, wtBase); assert.ok( - existsSync(join(wtBase, ".gsd", "preferences.md")), - "preferences.md should be copied to worktree", + existsSync(join(wtBase, ".gsd", "PREFERENCES.md")), + "PREFERENCES.md should be copied to worktree", ); assert.equal( - readFileSync(join(wtBase, ".gsd", "preferences.md"), "utf-8"), + readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"), PREFS_CONTENT, - "preferences.md content should match source", + "PREFERENCES.md content should match source", ); assert.ok( - result.synced.includes("preferences.md"), - "preferences.md should appear in synced list", + result.synced.includes("PREFERENCES.md"), + "PREFERENCES.md should appear in synced list", ); }); -test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree preferences.md", (t) => { +test("syncGsdStateToWorktree still accepts legacy lowercase preferences.md", (t) => { + const mainBase = makeTempDir("main"); + const wtBase = makeTempDir("wt"); + t.after(() => cleanup(mainBase, wtBase)); + + writeFile(mainBase, ".gsd/preferences.md", PREFS_CONTENT); + mkdirSync(join(wtBase, ".gsd"), { recursive: true }); + + const result = syncGsdStateToWorktree(mainBase, wtBase); + + const copiedEntries = readdirSync(join(wtBase, ".gsd")) + .filter((name) => name === "PREFERENCES.md" || name === "preferences.md"); + + assert.ok( + copiedEntries.length === 1, + `expected exactly one preferences file in worktree, got ${copiedEntries.join(", ") || "(none)"}`, + ); + assert.ok( + result.synced.includes("preferences.md") || result.synced.includes("PREFERENCES.md"), + "legacy source should still appear in synced list", + ); +}); + +test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree preferences file", (t) => { const mainBase = makeTempDir("main"); const wtBase = makeTempDir("wt"); t.after(() => cleanup(mainBase, wtBase)); @@ -92,19 +117,19 @@ test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree prefere const rootPrefs = "# Root preferences\nold: true"; const wtPrefs = "# Worktree preferences\nmodified: true"; - writeFile(mainBase, ".gsd/preferences.md", rootPrefs); - writeFile(wtBase, ".gsd/preferences.md", wtPrefs); + writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs); + writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs); syncGsdStateToWorktree(mainBase, wtBase); assert.equal( - readFileSync(join(wtBase, ".gsd", "preferences.md"), "utf-8"), + readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"), wtPrefs, - "existing worktree preferences.md must not be overwritten", + "existing worktree PREFERENCES.md must not be overwritten", ); }); -test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.md", (t) => { +test("#2684: syncWorktreeStateBack does NOT overwrite project root PREFERENCES.md", (t) => { const mainBase = makeTempDir("main"); const wtBase = makeTempDir("wt"); const mid = "M001"; @@ -113,8 +138,8 @@ test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.m const rootPrefs = "# Root preferences\nauthoritative: true"; const wtPrefs = "# Worktree preferences\nstale-copy: true"; - writeFile(mainBase, ".gsd/preferences.md", rootPrefs); - writeFile(wtBase, ".gsd/preferences.md", wtPrefs); + writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs); + writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs); // Worktree needs at least a milestone dir for the function to proceed mkdirSync(join(wtBase, ".gsd", "milestones", mid), { recursive: true }); @@ -123,8 +148,8 @@ test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.m syncWorktreeStateBack(mainBase, wtBase, mid); assert.equal( - readFileSync(join(mainBase, ".gsd", "preferences.md"), "utf-8"), + readFileSync(join(mainBase, ".gsd", "PREFERENCES.md"), "utf-8"), rootPrefs, - "project root preferences.md must NOT be overwritten by worktree copy", + "project root PREFERENCES.md must NOT be overwritten by worktree copy", ); }); From da7f5793be1fcafa37f30ac76ce4a8d7ff42f9d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?T=C3=82CHES?= Date: Fri, 27 Mar 2026 15:22:34 -0600 Subject: [PATCH 25/27] fix(ci): copy web/components to dist-test for xterm-theme test (#2891) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The xterm-theme test reads shell-terminal.tsx and main-session-terminal.tsx via readFileSync relative to import.meta.dirname. When compiled tests run from dist-test/, this resolves to dist-test/web/components/gsd/ — but only web/lib/ was being copied by compile-tests.mjs, causing the test to fail. Co-authored-by: Claude Opus 4.6 (1M context) --- scripts/compile-tests.mjs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/compile-tests.mjs b/scripts/compile-tests.mjs index 066c02e9b..3d6ac5e57 100644 --- a/scripts/compile-tests.mjs +++ b/scripts/compile-tests.mjs @@ -130,6 +130,9 @@ async function main() { // Copy web/lib/ assets (tests import from ../../web/lib/ relative to dist-test/src/tests/) await copyAssets(join(ROOT, 'web', 'lib'), join(ROOT, 'dist-test', 'web', 'lib')); + // Copy web/components/ assets (xterm-theme test reads shell-terminal.tsx via import.meta.dirname) + await copyAssets(join(ROOT, 'web', 'components'), join(ROOT, 'dist-test', 'web', 'components')); + // Copy scripts/ non-TS files (.cjs etc) — some tests require() scripts directly await copyAssets(join(ROOT, 'scripts'), join(ROOT, 'dist-test', 'scripts')); From 3e0b2b7c6b6aaafbcd249ad7f9bda2033129d55e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?T=C3=82CHES?= Date: Fri, 27 Mar 2026 15:22:48 -0600 Subject: [PATCH 26/27] docs: rewrite gsd-orchestrator skill as agent-oriented playbook (#2889) Restructure from flat documentation reference into proper agent-oriented skill with XML structure, mental model, routing to workflows, and restored reference content (KNOWLEDGE.md, flags, event streaming, answer injection, command table). Co-authored-by: Claude Opus 4.6 (1M context) --- gsd-orchestrator/SKILL.md | 455 +++++++++++++------------------------- 1 file changed, 148 insertions(+), 307 deletions(-) diff --git a/gsd-orchestrator/SKILL.md b/gsd-orchestrator/SKILL.md index 48e044b8c..ad423afdf 100644 --- a/gsd-orchestrator/SKILL.md +++ b/gsd-orchestrator/SKILL.md @@ -1,13 +1,11 @@ --- name: gsd-orchestrator description: > - Orchestrate GSD (Get Shit Done) projects via subprocess execution. - Use when an agent needs to create milestones from specs, execute software - development workflows, monitor task progress, poll status, handle blockers, - or track costs. Triggers on requests to "run gsd", "create milestone", - "execute project", "check gsd status", "orchestrate development", - "run headless workflow", or any programmatic interaction with the GSD - project management system. + Build software products autonomously via GSD headless mode. Handles the full + lifecycle: write a spec, launch a build, poll for completion, handle blockers, + track costs, and verify the result. Use when asked to "build something", + "create a project", "run gsd", "check build status", or any task that + requires autonomous software development via subprocess. metadata: openclaw: requires: @@ -18,267 +16,166 @@ metadata: bins: [gsd] --- -# GSD Orchestrator + +You are an autonomous agent that builds software by orchestrating GSD as a subprocess. +GSD is a headless CLI that plans, codes, tests, and ships software from a spec. +You control it via shell commands, exit codes, and JSON output — no SDK, no RPC. + -Run GSD commands as subprocesses via `gsd headless`. No SDK, no RPC — just shell exec, exit codes, and JSON on stdout. + +GSD headless is a subprocess you launch and monitor. Think of it like a junior developer +you hand a spec to: -## Quick Start +1. You write the spec (what to build) +2. You launch the build (`gsd headless ... new-milestone --context spec.md --auto`) +3. You wait for it to finish (exit code tells you the outcome) +4. You check the result (query state, inspect files, verify deliverables) +5. If blocked, you intervene (steer, supply answers, or escalate) +The subprocess handles all planning, coding, testing, and git commits internally. +You never write application code yourself — GSD does that. + + + +- **Flags before command.** `gsd headless [--flags] [command] [args]`. Flags after the command are ignored. +- **Redirect stderr.** JSON output goes to stdout. Progress goes to stderr. Always `2>/dev/null` when parsing JSON. +- **Check exit codes.** 0=success, 1=error, 10=blocked (needs you), 11=cancelled. +- **Use `query` to poll.** Instant (~50ms), no LLM cost. Use it between steps, not `auto` for status. +- **Budget awareness.** Track `cost.total` from query results. Set limits before launching long runs. +- **One project directory per build.** Each GSD project needs its own directory with a `.gsd/` folder. + + + +Route based on what you need to do: + +**Build something from scratch:** +Read `workflows/build-from-spec.md` — write spec, init directory, launch, monitor, verify. + +**Check on a running or completed build:** +Read `workflows/monitor-and-poll.md` — query state, interpret phases, handle blockers. + +**Execute with fine-grained control:** +Read `workflows/step-by-step.md` — run one unit at a time with decision points. + +**Understand the JSON output:** +Read `references/json-result.md` — field reference for HeadlessJsonResult. + +**Pre-supply answers or secrets:** +Read `references/answer-injection.md` — answer file schema and injection mechanism. + +**Look up a specific command:** +Read `references/commands.md` — full command reference with flags and examples. + + + + +**Launch a full build (spec to working code):** ```bash -# Install GSD globally -npm install -g gsd-pi - -# Verify installation -gsd --version - -# Create a milestone from a spec and execute it -gsd headless --output-format json new-milestone --context spec.md --auto +mkdir -p /tmp/my-project && cd /tmp/my-project && git init +cat > spec.md << 'EOF' +# Your Product Spec Here +Build a ... +EOF +gsd headless --output-format json --context spec.md new-milestone --auto 2>/dev/null ``` -## Command Syntax - +**Check project state (instant, free):** ```bash -gsd headless [flags] [command] [args...] +cd /path/to/project +gsd headless query | jq '{phase: .state.phase, progress: .state.progress, cost: .cost.total}' ``` -Default command is `auto` (run all queued units). +**Resume work on an existing project:** +```bash +cd /path/to/project +gsd headless --output-format json auto 2>/dev/null +``` -### Flags +**Run one step at a time:** +```bash +RESULT=$(gsd headless --output-format json next 2>/dev/null) +echo "$RESULT" | jq '{status: .status, phase: .phase, cost: .cost.total}' +``` + + + +| Code | Meaning | Your action | +|------|---------|-------------| +| `0` | Success | Check deliverables, verify output, report completion | +| `1` | Error or timeout | Inspect stderr, check `.gsd/STATE.md`, retry or escalate | +| `10` | Blocked | Query state for blocker details, steer around it or escalate to human | +| `11` | Cancelled | Process was interrupted — resume with `--resume ` or restart | + + + +GSD creates and manages all state in `.gsd/`: +``` +.gsd/ + PROJECT.md # What this project is + REQUIREMENTS.md # Capability contract + DECISIONS.md # Architectural decisions (append-only) + KNOWLEDGE.md # Persistent project knowledge (patterns, rules, lessons) + STATE.md # Current phase and next action + milestones/ + M001-xxxxx/ + M001-xxxxx-CONTEXT.md # Scope, constraints, assumptions + M001-xxxxx-ROADMAP.md # Slices with checkboxes + M001-xxxxx-SUMMARY.md # Completion summary + slices/S01/ + S01-PLAN.md # Tasks + S01-SUMMARY.md # Slice summary + tasks/ + T01-PLAN.md # Individual task spec + T01-SUMMARY.md # Task completion summary +``` + +State is derived from files on disk — checkboxes in ROADMAP.md and PLAN.md are the source of truth for completion. You never need to edit these files. GSD manages them. But you can read them to understand progress. + + + | Flag | Description | |------|-------------| -| `--output-format ` | Output format: `text` (default), `json` (structured result at exit), `stream-json` (JSONL events) | +| `--output-format ` | `text` (default), `json` (structured result at exit), `stream-json` (JSONL events) | | `--json` | Alias for `--output-format stream-json` — JSONL event stream to stdout | -| `--bare` | Minimal context: skip CLAUDE.md, AGENTS.md, user settings, user skills. Use for CI/ecosystem runs. | +| `--bare` | Skip CLAUDE.md, AGENTS.md, user settings, user skills. Use for CI/ecosystem runs. | | `--resume ` | Resume a prior headless session by its session ID | -| `--timeout N` | Overall timeout in ms (default: 300000) | +| `--timeout N` | Overall timeout in ms (default: 300000, use 0 to disable) | | `--model ID` | Override LLM model | | `--supervised` | Forward interactive UI requests to orchestrator via stdout/stdin | | `--response-timeout N` | Timeout (ms) for orchestrator response in supervised mode (default: 30000) | | `--answers ` | Pre-supply answers and secrets from JSON file | -| `--events ` | Filter JSONL output to specific event types (comma-separated, implies `--json`) | +| `--events ` | Filter JSONL to specific event types (comma-separated, implies `--json`) | | `--verbose` | Show tool calls in progress output | +| `--context ` | Spec file path for `new-milestone` (use `-` for stdin) | +| `--context-text ` | Inline spec text for `new-milestone` | +| `--auto` | Chain into auto-mode after `new-milestone` | + -### Exit Codes - -| Code | Meaning | Constant | -|------|---------|----------| -| `0` | Success — unit/milestone completed | `EXIT_SUCCESS` | -| `1` | Error or timeout | `EXIT_ERROR` | -| `10` | Blocked — needs human intervention | `EXIT_BLOCKED` | -| `11` | Cancelled by user or orchestrator | `EXIT_CANCELLED` | - -These codes are stable and suitable for CI pipelines and orchestrator logic. - -### Output Formats - -| Format | Behavior | -|--------|----------| -| `text` | Human-readable progress on stderr. Default. | -| `json` | Collect events silently. Emit a single `HeadlessJsonResult` JSON object to stdout at exit. | -| `stream-json` | Stream JSONL events to stdout in real time (same as `--json`). | - -Use `--output-format json` when you need a structured result for decision-making. See [references/json-result.md](references/json-result.md) for the full field reference. - -## Core Workflows - -### 1. Create + Execute a Milestone (end-to-end) + +Pre-supply answers and secrets for fully autonomous runs: ```bash -gsd headless --output-format json new-milestone --context spec.md --auto +gsd headless --answers answers.json --output-format json auto 2>/dev/null ``` -Reads a spec file, bootstraps `.gsd/`, creates the milestone, then chains into auto-mode executing all phases (discuss → research → plan → execute → summarize → complete). The JSON result is emitted on stdout at exit. - -Extra flags for `new-milestone`: -- `--context ` — path to spec/PRD file (use `-` for stdin) -- `--context-text ` — inline specification text -- `--auto` — start auto-mode after milestone creation -- `--verbose` — show tool calls in progress output - -```bash -# From stdin -cat spec.md | gsd headless --output-format json new-milestone --context - --auto - -# Inline text -gsd headless new-milestone --context-text "Build a REST API for user management" --auto -``` - -### 2. Run All Queued Work - -```bash -gsd headless --output-format json auto -``` - -Loop through all pending units until milestone complete or blocked. - -### 3. Run One Unit (step-by-step) - -```bash -gsd headless --output-format json next -``` - -Execute exactly one unit (task/slice/milestone step), then exit. This is the recommended pattern for orchestrators that need control between steps. - -### 4. Instant State Snapshot (no LLM) - -```bash -gsd headless query -``` - -Returns a single JSON object with the full project snapshot — no LLM session, instant (~50ms). **This is the recommended way for orchestrators to inspect state.** - ```json { - "state": { - "phase": "executing", - "activeMilestone": { "id": "M001", "title": "..." }, - "activeSlice": { "id": "S01", "title": "..." }, - "progress": { "completed": 3, "total": 7 }, - "registry": [...] - }, - "next": { "action": "dispatch", "unitType": "execute-task", "unitId": "M001/S01/T01" }, - "cost": { "workers": [{ "milestoneId": "M001", "cost": 1.50 }], "total": 1.50 } + "questions": { "question_id": "selected_option" }, + "secrets": { "API_KEY": "sk-..." }, + "defaults": { "strategy": "first_option" } } ``` -### 5. Dispatch Specific Phase +- **questions** — question ID to answer (string for single-select, string[] for multi-select) +- **secrets** — env var to value, injected into child process environment +- **defaults.strategy** — `"first_option"` (default) or `"cancel"` for unmatched questions -```bash -gsd headless dispatch research|plan|execute|complete|reassess|uat|replan -``` +See `references/answer-injection.md` for the full mechanism. + -Force-route to a specific phase, bypassing normal state-machine routing. - -### 6. Resume a Session - -```bash -gsd headless --resume auto -``` - -Resume a prior headless session. The session ID is available in the `HeadlessJsonResult.sessionId` field from a previous `--output-format json` run. - -## Orchestrator Patterns - -### Parse the Structured JSON Result - -When using `--output-format json`, the process emits a single `HeadlessJsonResult` on stdout at exit. Parse it for decision-making: - -```bash -RESULT=$(gsd headless --output-format json next 2>/dev/null) -EXIT=$? - -STATUS=$(echo "$RESULT" | jq -r '.status') -COST=$(echo "$RESULT" | jq -r '.cost.total') -PHASE=$(echo "$RESULT" | jq -r '.phase') -NEXT=$(echo "$RESULT" | jq -r '.nextAction') -SESSION_ID=$(echo "$RESULT" | jq -r '.sessionId') - -echo "Status: $STATUS, Cost: \$${COST}, Phase: $PHASE, Next: $NEXT" -``` - -See [references/json-result.md](references/json-result.md) for the full field reference. - -### Blocker Detection and Handling - -Exit code `10` means the execution hit a blocker requiring human intervention: - -```bash -gsd headless --output-format json next 2>/dev/null -EXIT=$? - -if [ $EXIT -eq 10 ]; then - # Inspect the blocker - BLOCKER=$(gsd headless query | jq '.state.phase') - echo "Blocked: $BLOCKER" - - # Option 1: Use --supervised mode to handle interactively - gsd headless --supervised auto - - # Option 2: Pre-supply answers to resolve the blocker - gsd headless --answers blocker-answers.json auto - - # Option 3: Steer the plan to work around it - gsd headless steer "Skip the blocked dependency, use mock instead" -fi -``` - -### Cost Tracking and Budget Enforcement - -```bash -MAX_BUDGET=10.00 - -RESULT=$(gsd headless --output-format json next 2>/dev/null) -COST=$(echo "$RESULT" | jq -r '.cost.total') - -# Check cumulative cost via query (includes all workers) -TOTAL_COST=$(gsd headless query | jq -r '.cost.total') - -if (( $(echo "$TOTAL_COST > $MAX_BUDGET" | bc -l) )); then - echo "Budget exceeded: \$$TOTAL_COST > \$$MAX_BUDGET" - gsd headless stop - exit 1 -fi -``` - -### Step-by-Step with Monitoring - -The recommended pattern for full control. Run one unit at a time, inspect state between steps: - -```bash -while true; do - RESULT=$(gsd headless --output-format json next 2>/dev/null) - EXIT=$? - - STATUS=$(echo "$RESULT" | jq -r '.status') - COST=$(echo "$RESULT" | jq -r '.cost.total') - - echo "Exit: $EXIT, Status: $STATUS, Cost: \$$COST" - - # Handle terminal states - [ $EXIT -eq 0 ] || break - - # Check if milestone is complete - PHASE=$(gsd headless query | jq -r '.state.phase') - [ "$PHASE" = "complete" ] && echo "Milestone complete" && break - - # Budget check - TOTAL=$(gsd headless query | jq -r '.cost.total') - if (( $(echo "$TOTAL > 20.00" | bc -l) )); then - echo "Budget limit reached" - break - fi -done -``` - -### Poll-and-React Loop - -Lightweight pattern using only the instant `query` command: - -```bash -PHASE=$(gsd headless query | jq -r '.state.phase') -NEXT_ACTION=$(gsd headless query | jq -r '.next.action') - -case "$PHASE" in - complete) echo "Done" ;; - blocked) echo "Needs intervention — exit code 10" ;; - *) [ "$NEXT_ACTION" = "dispatch" ] && gsd headless next ;; -esac -``` - -### CI/Ecosystem Mode - -Use `--bare` to skip user-specific configuration for deterministic CI runs: - -```bash -gsd headless --bare --output-format json auto 2>/dev/null -``` - -This skips CLAUDE.md, AGENTS.md, user settings, and user skills. Bundled GSD extensions and `.gsd/` state are still loaded (they're required for GSD to function). - -### JSONL Event Stream - -Use `--json` (or `--output-format stream-json`) for real-time events: + +For real-time monitoring, use JSONL event streaming: ```bash gsd headless --json auto 2>/dev/null | while read -r line; do @@ -291,84 +188,28 @@ gsd headless --json auto 2>/dev/null | while read -r line; do done ``` -### Filtered Event Stream +Filter to specific events: `--events agent_end,execution_complete,extension_ui_request` -Use `--events` to receive only specific event types: - -```bash -# Only phase-relevant events -gsd headless --events agent_end,extension_ui_request auto 2>/dev/null - -# Only tool execution events -gsd headless --events tool_execution_start,tool_execution_end auto -``` - -Available event types: `agent_start`, `agent_end`, `tool_execution_start`, `tool_execution_end`, `tool_execution_update`, `extension_ui_request`, `message_start`, `message_end`, `message_update`, `turn_start`, `turn_end`. - -## Answer Injection - -Pre-supply answers and secrets for fully autonomous headless runs: - -```bash -gsd headless --answers answers.json auto -``` - -Answer file schema: -```json -{ - "questions": { "question_id": "selected_option" }, - "secrets": { "API_KEY": "sk-..." }, - "defaults": { "strategy": "first_option" } -} -``` - -- **questions** — question ID → answer (string for single-select, string[] for multi-select) -- **secrets** — env var → value, injected into child process environment -- **defaults.strategy** — `"first_option"` (default) or `"cancel"` for unmatched questions - -See [references/answer-injection.md](references/answer-injection.md) for the full mechanism. - -## GSD Project Structure - -All state lives in `.gsd/` as markdown files (version-controllable): - -``` -.gsd/ - PROJECT.md - REQUIREMENTS.md - DECISIONS.md - KNOWLEDGE.md - STATE.md - milestones/ - M001/ - M001-CONTEXT.md # Requirements, scope, decisions - M001-ROADMAP.md # Slices with tasks, dependencies, checkboxes - M001-SUMMARY.md # Completion summary - slices/ - S01/ - S01-PLAN.md # Task list - S01-SUMMARY.md # Slice summary - tasks/ - T01-PLAN.md # Individual task spec - T01-SUMMARY.md # Task completion summary -``` - -State is derived from files on disk — checkboxes in ROADMAP.md and PLAN.md are the source of truth for completion. - -## All Commands - -See [references/commands.md](references/commands.md) for the complete reference. +Available types: `agent_start`, `agent_end`, `tool_execution_start`, `tool_execution_end`, +`tool_execution_update`, `extension_ui_request`, `message_start`, `message_end`, +`message_update`, `turn_start`, `turn_end`, `cost_update`, `execution_complete`. + + | Command | Purpose | |---------|---------| -| `auto` | Run all queued units (default) | -| `next` | Run one unit | -| `query` | Instant JSON snapshot — state, next dispatch, costs (no LLM) | -| `new-milestone` | Create milestone from spec | -| `dispatch ` | Force specific phase | +| `auto` | Run all queued units until milestone complete or blocked (default) | +| `next` | Run exactly one unit, then exit | +| `query` | Instant JSON snapshot — state, next dispatch, costs (no LLM, ~50ms) | +| `new-milestone` | Create milestone from spec file | +| `dispatch ` | Force specific phase (research, plan, execute, complete, reassess, uat, replan) | | `stop` / `pause` | Control auto-mode | | `steer ` | Hard-steer plan mid-execution | | `skip` / `undo` | Unit control | | `queue` | Queue/reorder milestones | | `history` | View execution history | | `doctor` | Health check + auto-fix | +| `knowledge ` | Add persistent project knowledge | + +See `references/commands.md` for the complete reference. + From b5715c20bb09d02e10e29a3642ba1523855e62e0 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 27 Mar 2026 21:29:07 +0000 Subject: [PATCH 27/27] release: v2.56.0 --- CHANGELOG.md | 19 ++++++++++++++++++- native/npm/darwin-arm64/package.json | 2 +- native/npm/darwin-x64/package.json | 2 +- native/npm/linux-arm64-gnu/package.json | 2 +- native/npm/linux-x64-gnu/package.json | 2 +- native/npm/win32-x64-msvc/package.json | 2 +- package.json | 2 +- packages/pi-coding-agent/package.json | 2 +- pkg/package.json | 2 +- 9 files changed, 26 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2dc4f3e2e..20ca48263 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,22 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +## [2.56.0] - 2026-03-27 + +### Added +- **parallel**: /gsd parallel watch — native TUI overlay for worker monitoring (#2806) + +### Fixed +- **ci**: copy web/components to dist-test for xterm-theme test (#2891) +- **gsd**: prefer PREFERENCES.md in worktrees (#2796) +- **gsd**: resume auto-mode after transient provider pause (#2822) +- **parallel**: resolve session lock contention and 3 related parallel-mode bugs (#2184) (#2800) +- **web**: improve light theme terminal contrast (#2819) +- **gsd**: preserve auto start model through discuss (#2837) + +### Changed +- **test**: compile unit tests with esbuild, reclassify integration tests, fix node_modules symlink (#2809) + ## [2.55.0] - 2026-03-27 ### Added @@ -2095,7 +2111,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ### Changed - License updated to MIT -[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.55.0...HEAD +[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.56.0...HEAD +[2.56.0]: https://github.com/gsd-build/gsd-2/compare/v2.55.0...v2.56.0 [2.55.0]: https://github.com/gsd-build/gsd-2/compare/v2.54.0...v2.55.0 [2.54.0]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...v2.54.0 [2.53.0]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...v2.53.0 diff --git a/native/npm/darwin-arm64/package.json b/native/npm/darwin-arm64/package.json index 3216cd21f..7d1a183bf 100644 --- a/native/npm/darwin-arm64/package.json +++ b/native/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-arm64", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD native engine binary for macOS ARM64", "os": [ "darwin" diff --git a/native/npm/darwin-x64/package.json b/native/npm/darwin-x64/package.json index f30ce8379..958bfcb50 100644 --- a/native/npm/darwin-x64/package.json +++ b/native/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-darwin-x64", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD native engine binary for macOS Intel", "os": [ "darwin" diff --git a/native/npm/linux-arm64-gnu/package.json b/native/npm/linux-arm64-gnu/package.json index 2d201f5ee..4eadbb2e9 100644 --- a/native/npm/linux-arm64-gnu/package.json +++ b/native/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-arm64-gnu", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD native engine binary for Linux ARM64 (glibc)", "os": [ "linux" diff --git a/native/npm/linux-x64-gnu/package.json b/native/npm/linux-x64-gnu/package.json index e87092d07..ed98cd9c2 100644 --- a/native/npm/linux-x64-gnu/package.json +++ b/native/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-linux-x64-gnu", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD native engine binary for Linux x64 (glibc)", "os": [ "linux" diff --git a/native/npm/win32-x64-msvc/package.json b/native/npm/win32-x64-msvc/package.json index 159255c52..d7c7e2c62 100644 --- a/native/npm/win32-x64-msvc/package.json +++ b/native/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@gsd-build/engine-win32-x64-msvc", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD native engine binary for Windows x64 (MSVC)", "os": [ "win32" diff --git a/package.json b/package.json index 4b2bcf3d4..0d749d0a4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "gsd-pi", - "version": "2.55.0", + "version": "2.56.0", "description": "GSD — Get Shit Done coding agent", "license": "MIT", "repository": { diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json index 82bcd709a..f6d269b13 100644 --- a/packages/pi-coding-agent/package.json +++ b/packages/pi-coding-agent/package.json @@ -1,6 +1,6 @@ { "name": "@gsd/pi-coding-agent", - "version": "2.55.0", + "version": "2.56.0", "description": "Coding agent CLI (vendored from pi-mono)", "type": "module", "piConfig": { diff --git a/pkg/package.json b/pkg/package.json index 8387ef366..b3a30644b 100644 --- a/pkg/package.json +++ b/pkg/package.json @@ -1,6 +1,6 @@ { "name": "@glittercowboy/gsd", - "version": "2.55.0", + "version": "2.56.0", "piConfig": { "name": "gsd", "configDir": ".gsd"