From fa376bf816863d75e2b6309001bbab4bdd3f30e7 Mon Sep 17 00:00:00 2001 From: Lex Christopherson Date: Tue, 24 Mar 2026 13:21:19 -0600 Subject: [PATCH] merge: incorporate main into next (resolve 26 conflicts) Merges 39 commits from main into next, including: - WAL/journal runtime exclusion fixes (#2299) - Memory and resource leak fixes (#2314) - Freeform DECISIONS.md preservation (#2319) - Per-prompt token cost display (#2357) - Web UI project root switching (#2355) - CODEOWNERS and team workflow docs (#2286) - CI flake threshold fix (#2327) - Various other bugfixes All conflicts resolved preserving both PR #2280 DB-backed planning functionality and main's bugfixes. Co-Authored-By: Claude Opus 4.6 (1M context) --- .github/CODEOWNERS | 36 ++ .github/workflows/ai-triage.yml | 2 +- .github/workflows/ci.yml | 5 + .github/workflows/pr-risk.yml | 14 +- CHANGELOG.md | 38 +- CONTRIBUTING.md | 64 ++- README.md | 23 + docs/commands.md | 1 + docs/troubleshooting.md | 42 ++ docs/web-interface.md | 24 +- .../18-quick-reference-commands-shortcuts.md | 2 + native/crates/engine/src/glob.rs | 8 +- native/crates/engine/src/image.rs | 19 +- native/crates/engine/src/ttsr.rs | 45 +- packages/pi-ai/src/models.custom.ts | 172 +++++++ packages/pi-ai/src/models.test.ts | 85 ++++ packages/pi-ai/src/models.ts | 18 +- packages/pi-coding-agent/package.json | 2 +- .../pi-coding-agent/src/core/agent-session.ts | 21 +- .../src/core/auth-storage.test.ts | 68 +++ .../pi-coding-agent/src/core/auth-storage.ts | 7 +- .../src/core/extensions/loader.ts | 18 + .../pi-coding-agent/src/core/lsp/client.ts | 133 +++++- .../src/core/package-manager.ts | 157 ++++--- .../src/core/resource-loader.ts | 30 +- .../pi-coding-agent/src/core/system-prompt.ts | 11 +- .../components/extension-editor.ts | 3 + .../modes/interactive/components/footer.ts | 20 + .../src/modes/interactive/interactive-mode.ts | 44 +- .../src/modes/interactive/theme/theme.ts | 25 +- .../pi-coding-agent/src/modes/print-mode.ts | 74 ++-- .../src/modes/rpc/rpc-client.ts | 10 +- .../pi-coding-agent/src/modes/rpc/rpc-mode.ts | 3 +- pkg/package.json | 2 +- scripts/install-hooks.sh | 34 -- scripts/watch-resources.js | 13 +- src/cli.ts | 24 +- src/loader.ts | 4 +- src/resource-loader.ts | 49 +- .../async-jobs/async-bash-timeout.test.ts | 122 +++++ .../extensions/async-jobs/async-bash-tool.ts | 44 +- .../extensions/async-jobs/await-tool.test.ts | 47 ++ .../extensions/async-jobs/await-tool.ts | 5 + src/resources/extensions/async-jobs/index.ts | 1 + .../extensions/async-jobs/job-manager.ts | 2 + src/resources/extensions/bg-shell/overlay.ts | 4 + src/resources/extensions/gsd/auto-prompts.ts | 20 +- src/resources/extensions/gsd/auto-start.ts | 17 +- .../extensions/gsd/auto-supervisor.ts | 14 + src/resources/extensions/gsd/auto-worktree.ts | 92 +++- .../extensions/gsd/auto/loop-deps.ts | 1 - src/resources/extensions/gsd/auto/phases.ts | 4 +- .../gsd/bootstrap/register-hooks.ts | 25 +- src/resources/extensions/gsd/db-writer.ts | 78 +++- src/resources/extensions/gsd/detection.ts | 19 + src/resources/extensions/gsd/doctor-checks.ts | 33 +- .../extensions/gsd/doctor-environment.ts | 31 ++ .../extensions/gsd/doctor-providers.ts | 13 + src/resources/extensions/gsd/doctor-types.ts | 1 + src/resources/extensions/gsd/file-watcher.ts | 5 +- src/resources/extensions/gsd/forensics.ts | 92 ++++ src/resources/extensions/gsd/git-service.ts | 78 +--- src/resources/extensions/gsd/gitignore.ts | 6 +- src/resources/extensions/gsd/gsd-db.ts | 20 +- .../extensions/gsd/native-git-bridge.ts | 13 +- 
.../extensions/gsd/parallel-orchestrator.ts | 43 ++ .../extensions/gsd/preferences-types.ts | 6 + .../extensions/gsd/preferences-validation.ts | 9 + src/resources/extensions/gsd/preferences.ts | 69 ++- .../extensions/gsd/prompts/forensics.md | 2 + src/resources/extensions/gsd/repo-identity.ts | 53 ++- src/resources/extensions/gsd/service-tier.ts | 21 +- src/resources/extensions/gsd/session-lock.ts | 4 +- .../extensions/gsd/tests/activity-log.test.ts | 100 ++--- .../gsd/tests/auto-stash-merge.test.ts | 121 +++++ .../auto-worktree-milestone-merge.test.ts | 35 +- .../gsd/tests/derive-state-db.test.ts | 5 +- .../tests/doctor-environment-worktree.test.ts | 175 ++++++++ .../gsd/tests/forensics-dedup.test.ts | 48 ++ .../gsd/tests/freeform-decisions.test.ts | 240 ++++++++++ .../extensions/gsd/tests/git-service.test.ts | 31 +- .../extensions/gsd/tests/gsd-recover.test.ts | 2 + .../extensions/gsd/tests/journal.test.ts | 227 ++++------ .../gsd/tests/manifest-status.test.ts | 157 ++++--- .../gsd/tests/markdown-renderer.test.ts | 1 + .../gsd/tests/prompt-contracts.test.ts | 22 +- .../gsd/tests/rogue-file-detection.test.ts | 31 ++ .../extensions/gsd/tests/service-tier.test.ts | 31 +- .../gsd/tests/skill-activation.test.ts | 59 ++- .../tests/symlink-numbered-variants.test.ts | 151 +++++++ .../gsd/tests/token-cost-display.test.ts | 118 +++++ .../gsd/tests/verification-gate.test.ts | 419 +++++++----------- .../tests/worktree-health-dispatch.test.ts | 117 ++--- .../gsd/tests/worktree-manager.test.ts | 165 +++---- .../gsd/tests/worktree-resolver.test.ts | 3 +- .../extensions/gsd/worktree-resolver.ts | 5 +- src/resources/extensions/gsd/worktree.ts | 4 +- src/resources/extensions/mcp-client/index.ts | 6 +- .../extensions/search-the-web/tool-search.ts | 6 +- src/tests/search-loop-guard.test.ts | 33 +- src/tests/startup-perf.test.ts | 160 +++++++ src/tests/web-boot-node24.test.ts | 23 + src/tests/web-bridge-contract.test.ts | 74 ++++ src/tests/web-onboarding-contract.test.ts | 131 +++++- .../web-subprocess-module-resolution.test.ts | 157 +++++++ src/tests/web-switch-project.test.ts | 277 ++++++++++++ src/web-mode.ts | 10 +- src/web/auto-dashboard-service.ts | 30 +- src/web/bridge-service.ts | 27 +- src/web/captures-service.ts | 36 +- src/web/cleanup-service.ts | 36 +- src/web/doctor-service.ts | 54 +-- src/web/export-service.ts | 21 +- src/web/forensics-service.ts | 21 +- src/web/history-service.ts | 21 +- src/web/hooks-service.ts | 21 +- src/web/onboarding-service.ts | 2 +- src/web/recovery-diagnostics-service.ts | 30 +- src/web/settings-service.ts | 49 +- src/web/skill-health-service.ts | 21 +- src/web/ts-subprocess-flags.ts | 74 +++- src/web/undo-service.ts | 42 +- src/web/visualizer-service.ts | 21 +- web/app/api/switch-root/route.ts | 109 +++++ web/components/gsd/projects-view.tsx | 110 ++++- 125 files changed, 4809 insertions(+), 1404 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 packages/pi-ai/src/models.custom.ts create mode 100644 packages/pi-ai/src/models.test.ts delete mode 100755 scripts/install-hooks.sh create mode 100644 src/resources/extensions/async-jobs/async-bash-timeout.test.ts create mode 100644 src/resources/extensions/gsd/tests/auto-stash-merge.test.ts create mode 100644 src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts create mode 100644 src/resources/extensions/gsd/tests/forensics-dedup.test.ts create mode 100644 src/resources/extensions/gsd/tests/freeform-decisions.test.ts create mode 100644 
src/resources/extensions/gsd/tests/symlink-numbered-variants.test.ts create mode 100644 src/resources/extensions/gsd/tests/token-cost-display.test.ts create mode 100644 src/tests/startup-perf.test.ts create mode 100644 src/tests/web-subprocess-module-resolution.test.ts create mode 100644 src/tests/web-switch-project.test.ts create mode 100644 web/app/api/switch-root/route.ts diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..f54b9a409 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,36 @@ +# CODEOWNERS +# Defines required reviewers per path. GitHub enforces these on PRs. +# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners +# +# Format: <@user or @org/team> +# Last matching rule wins. + +# Default: maintainers review everything not explicitly matched below +* @gsd-build/maintainers + +# Core agent orchestration — RFC required, senior review only +packages/pi-agent-core/ @gsd-build/maintainers +src/resources/extensions/gsd/ @gsd-build/maintainers + +# AI/LLM provider integrations +packages/pi-ai/ @gsd-build/maintainers + +# Terminal UI +packages/pi-tui/ @gsd-build/maintainers + +# Native bindings — platform-specific, needs careful review +native/ @gsd-build/maintainers + +# CI/CD and release pipeline — high blast radius +.github/ @gsd-build/maintainers +scripts/ @gsd-build/maintainers +Dockerfile @gsd-build/maintainers + +# Security-sensitive files — always require maintainer sign-off +.secretscanignore @gsd-build/maintainers +scripts/secret-scan.sh @gsd-build/maintainers +scripts/install-hooks.sh @gsd-build/maintainers + +# Contributor-facing docs — keep accurate, maintainers approve +CONTRIBUTING.md @gsd-build/maintainers +VISION.md @gsd-build/maintainers diff --git a/.github/workflows/ai-triage.yml b/.github/workflows/ai-triage.yml index b07fc8c46..f1e3e1abe 100644 --- a/.github/workflows/ai-triage.yml +++ b/.github/workflows/ai-triage.yml @@ -14,7 +14,7 @@ jobs: triage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: sparse-checkout: | VISION.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 30bfa4a6f..b76dc34cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,6 +24,7 @@ concurrency: jobs: detect-changes: + timeout-minutes: 2 runs-on: ubuntu-latest outputs: docs-only: ${{ steps.check.outputs.docs-only }} @@ -59,6 +60,7 @@ jobs: fi docs-check: + timeout-minutes: 5 runs-on: ubuntu-latest needs: detect-changes steps: @@ -70,6 +72,7 @@ jobs: run: bash scripts/docs-prompt-injection-scan.sh --diff origin/main lint: + timeout-minutes: 5 needs: detect-changes runs-on: ubuntu-latest steps: @@ -96,6 +99,7 @@ jobs: run: node scripts/check-skill-references.mjs build: + timeout-minutes: 15 needs: detect-changes if: needs.detect-changes.outputs.docs-only != 'true' runs-on: ubuntu-latest @@ -135,6 +139,7 @@ jobs: run: npm run test:integration windows-portability: + timeout-minutes: 15 needs: detect-changes if: >- needs.detect-changes.outputs.docs-only != 'true' && diff --git a/.github/workflows/pr-risk.yml b/.github/workflows/pr-risk.yml index bde087b7a..298d64851 100644 --- a/.github/workflows/pr-risk.yml +++ b/.github/workflows/pr-risk.yml @@ -19,14 +19,14 @@ jobs: steps: # Checkout the BASE branch — our trusted script and map, not fork code. 
- name: Checkout base - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: ref: ${{ github.base_ref }} - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: - node-version: '20' + node-version: '24' # Use the GitHub API to get changed files — no fork code is executed. - name: Get changed files @@ -44,14 +44,14 @@ id: risk run: | REPORT=$(cat /tmp/changed-files.txt | node scripts/pr-risk-check.mjs --github || true) - echo "report<<EOF" >> $GITHUB_OUTPUT - echo "$REPORT" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT + echo "report<<EOF" >> "$GITHUB_OUTPUT" + echo "$REPORT" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" RISK_LEVEL=$(cat /tmp/changed-files.txt | node scripts/pr-risk-check.mjs --json 2>/dev/null \ | node -e "let d=''; process.stdin.on('data',c=>d+=c); process.stdin.on('end',()=>{ try { console.log(JSON.parse(d).risk) } catch { console.log('low') } })" \ || echo "low") - echo "level=$RISK_LEVEL" >> $GITHUB_OUTPUT + echo "level=$RISK_LEVEL" >> "$GITHUB_OUTPUT" - name: Write step summary run: echo "${{ steps.risk.outputs.report }}" >> $GITHUB_STEP_SUMMARY diff --git a/CHANGELOG.md b/CHANGELOG.md index f04feade8..0a12d86fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,41 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## [Unreleased] +## [2.43.0] - 2026-03-23 + +### Added +- **forensics**: opt-in duplicate detection before issue creation (#2105) + +### Fixed +- prevent banner from printing twice on first run (#2251) +- **test**: Windows CI — use double quotes in git commit message (#2252) +- **async-jobs**: suppress duplicate follow-up for awaited job results (#2248) (#2250) +- **gsd**: remove force-staging of .gsd/milestones/ through symlinks (#2247) (#2249) +- **gsd**: remove over-broad skill activation heuristic (#2239) (#2244) +- **auth**: fall through to env/fallback when OAuth credential has no registered provider (#2097) +- **lsp**: bound message buffer and clean up stale client state (#2171) +- clean up macOS numbered .gsd collision variants (#2205) (#2210) +- **search**: keep duplicate-search loop guard armed (#2117) +- clean up extension error listener on session dispose (#2165) +- **web**: resolve 4 pre-existing onboarding contract test failures (#2209) +- async bash job timeout hangs indefinitely instead of erroring out (#2214) +- **gsd**: apply fast service tier outside auto-mode (#2126) +- **interactive**: clean up leaked SIGINT and extension selector listeners (#2172) +- **ci**: standardize GitHub Actions and Node.js versions (#2169) +- **native**: resolve memory leaks in glob, ttsr, and image overflow (#2170) +- extension resource management — prune stale dirs, fix isBuiltIn, gate skills on Skill tool, suppress search warnings (#2235) +- batch isolated fixes — error messages, preferences, web auth, MCP vars, detection, gitignore (#2232) +- document iTerm2 Ctrl+Alt+G keybinding conflict and add helpful hint (#2231) +- **footer**: display active inference model during execution (#1982) +- **web**: kill stale server process before launch to prevent EADDRINUSE (#1934) (#2034) +- **git**: force LC_ALL=C in GIT_NO_PROMPT_ENV to support non-English locales (#2035) +- **forensics**: force gh CLI for issue creation to prevent misrouting (#2067) (#2094) +- force-stage .gsd/milestones/ artifacts when .gsd is a symlink (#2104) (#2112) +- **pi-ai**: correct Copilot context window and output token limits (#2118) + +### Changed +- startup optimizations — pre-compiled extensions, compile cache, batch
discovery (#2125) + ## [2.42.0] - 2026-03-22 ### Added @@ -1637,7 +1672,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ### Changed - License updated to MIT -[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.42.0...HEAD +[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.43.0...HEAD +[2.43.0]: https://github.com/gsd-build/gsd-2/compare/v2.42.0...v2.43.0 [2.42.0]: https://github.com/gsd-build/gsd-2/compare/v2.41.0...v2.42.0 [2.41.0]: https://github.com/gsd-build/gsd-2/compare/v2.40.0...v2.41.0 [2.40.0]: https://github.com/gsd-build/gsd-2/compare/v2.39.0...v2.40.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index acf637fc2..46690bec6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,6 +11,59 @@ Read [VISION.md](VISION.md) before contributing. It defines what GSD-2 is, what 3. **No issue? Create one first** for new features. Bug fixes for obvious problems can skip this step. 4. **Architectural changes require an RFC.** If your change touches core systems (auto-mode, agent-core, orchestration), open an issue describing your approach and get approval before writing code. We use Architecture Decision Records (ADRs) for significant decisions. +## Branching and commits + +Always work on a dedicated branch. Never push directly to `main`. + +**Branch naming:** `<type>/<short-description>` + +| Type | When to use | +|------|-------------| +| `feat/` | New functionality | +| `fix/` | Bug or defect correction | +| `refactor/` | Code restructuring, no behavior change | +| `test/` | Adding or updating tests | +| `docs/` | Documentation only | +| `chore/` | Dependencies, tooling, housekeeping | +| `ci/` | CI/CD configuration | + +**Commit messages** must follow [Conventional Commits](https://www.conventionalcommits.org/). The commit-msg hook enforces this locally; CI enforces it on push. + +``` +<type>(<scope>): <description> +``` + +Valid types: `feat` `fix` `docs` `chore` `refactor` `test` `infra` `ci` `perf` `build` `revert` + +``` +feat(pi-agent-core): add streaming output for long-running tasks +fix(pi-ai): resolve null pointer on empty provider response +chore(deps): bump typescript from 5.3.0 to 5.4.2 +``` + +Keep branches current by rebasing onto `main` — do not merge `main` into your feature branch: + +```bash +git fetch origin +git rebase origin/main +``` + +## Working with GSD (team workflow) + +GSD uses worktree-based isolation for multi-developer work. If you're contributing with GSD running, enable team mode in your project preferences: + +```yaml +# .gsd/preferences.md +--- +version: 1 +mode: team +--- +``` + +This enables unique milestone IDs, branch pushing, and pre-merge checks — preventing milestone ID collisions when multiple contributors run auto-mode simultaneously. Each developer gets their own isolated worktree; squash merges to `main` happen independently. + +For full details see [docs/working-in-teams.md](docs/working-in-teams.md) and [docs/git-strategy.md](docs/git-strategy.md). + ## Opening a pull request ### PR description format @@ -65,10 +118,12 @@ If your PR changes any public API, CLI behavior, config format, or file structur AI-generated PRs are first-class citizens here. We welcome them. We just ask for transparency: -- **Disclose it.** Note that the PR is AI-assisted in your description. +- **Disclose it.** Note that the PR is AI-assisted in your description. Do not credit the AI tool as an author or co-author in the commit or PR. - **Test it.** AI-generated code must be tested to the same standard as human-written code.
"The AI said it works" is not a test plan. - **Understand it.** You should be able to explain what the code does and why. If a reviewer asks a question, "I'll ask the AI" is not an answer. +AI agents opening PRs must follow the same workflow as human contributors: clean working tree, new branch per task, CI passing before requesting review. Multi-phase work should start as a Draft PR and only move to Ready when complete. + AI PRs go through the same review process as any other PR. No special treatment in either direction. ## Architecture guidelines @@ -109,6 +164,9 @@ PRs go through automated review first, then human review. To help us review effi # Install dependencies npm ci +# Install git hooks (secret scanning + commit message validation) +npm run secret-scan:install-hook + # Build npm run build @@ -119,6 +177,10 @@ npm test npx tsc --noEmit ``` +Run `npm run secret-scan:install-hook` once after cloning. It installs two hooks: +- **pre-commit** — blocks commits containing hardcoded secrets or credentials +- **commit-msg** — validates Conventional Commits format before the commit lands + CI must pass before your PR will be reviewed. Run these locally to save time. ## Security diff --git a/README.md b/README.md index 99fd5a4fc..085d8ac62 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,29 @@ One command. Walk away. Come back to a built project with clean git history. --- +## What's New in v2.42.0 + +### New Features + +- **Declarative workflow engine** — define YAML workflows that execute through auto-loop, enabling repeatable multi-step automations without code. (#2024) +- **Unified rule registry & event journal** — centralized rule registry, event journal with query tool, and standardized tool naming convention. (#1928) +- **PR risk checker** — CI classifies changed files by system area and surfaces risk level on pull requests. (#1930) +- **`/gsd fast`** — toggle service tier for supported models, enabling prioritized API routing for faster responses. (#1862) +- **Web mode CLI flags** — `--host`, `--port`, and `--allowed-origins` flags give full control over the web server bind address and CORS policy. (#1873) +- **ADR attribution** — architecture decision records now distinguish human, agent, and collaborative authorship. (#1830) + +### Key Fixes + +- **Node v24 web boot** — resolved `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING` that prevented `gsd --web` from starting on Node v24. (#1864) +- **Worktree health check for all ecosystems** — broadened from JS-only to 17+ ecosystems (Rust, Go, Python, Java, etc.). (#1860) +- **Doctor roadmap atomicity** — roadmap checkbox gating now checks summary on disk, not issue detection, preventing false unchecks. (#1915) +- **Windows path handling** — 8.3 short path resolution, backslash normalization in bash commands, PowerShell browser launch, and parenthesis escaping. (#1960, #1863, #1870, #1872) +- **Auth token persistence** — web UI auth token survives page refreshes via sessionStorage. (#1877) +- **German/non-English locale git errors** — git commands now force `LC_ALL=C` to prevent locale-dependent parse failures. +- **Orphan web server process** — stale web server processes on port 3000 are now cleaned up automatically. 
+ +--- + ## What's New in v2.41.0 ### New Features diff --git a/docs/commands.md b/docs/commands.md index 5826978df..af33718fb 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -22,6 +22,7 @@ | `/gsd export --html --all` | Generate retrospective reports for all milestones at once | | `/gsd update` | Update GSD to the latest version in-session | | `/gsd knowledge` | Add persistent project knowledge (rule, pattern, or lesson) | +| `/gsd fast` | Toggle service tier for supported models (prioritized API routing) | | `/gsd help` | Categorized command reference with descriptions for all GSD subcommands | ## Configuration & Diagnostics diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 977a7881a..e588aae87 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -151,6 +151,38 @@ rm -rf "$(dirname .gsd)/.gsd.lock" - If the error persists, close tools that may be holding the file open and then retry. - If repeated failures continue, run `/gsd doctor` to confirm the repo state is still healthy and report the exact path + error code. +### Node v24 web boot failure + +**Symptoms:** `gsd --web` fails with `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING` on Node v24. + +**Cause:** Node v24 changed type-stripping behavior for `node_modules`, breaking the Next.js web build. + +**Fix:** Fixed in v2.42.0+ (#1864). Upgrade to the latest version. + +### Orphan web server process + +**Symptoms:** `gsd --web` fails because port 3000 is already in use, even though no GSD session is running. + +**Cause:** A previous web server process was not cleaned up on exit. + +**Fix:** Fixed in v2.42.0+. GSD now cleans up stale web server processes automatically. If you're on an older version, kill the orphan process manually: `lsof -ti:3000 | xargs kill`. + +### Non-JS project blocked by worktree health check + +**Symptoms:** Worktree health check fails or blocks auto-mode in projects that don't use Node.js (e.g., Rust, Go, Python). + +**Cause:** The worktree health check only recognized JavaScript ecosystems prior to v2.42.0. + +**Fix:** Fixed in v2.42.0+ (#1860). The health check now supports 17+ ecosystems. Upgrade to the latest version. + +### German/non-English locale git errors + +**Symptoms:** Git commands fail or produce unexpected results when the system locale is non-English (e.g., German). + +**Cause:** GSD parsed git output assuming English locale strings. + +**Fix:** Fixed in v2.42.0+. All git commands now force `LC_ALL=C` to ensure consistent English output regardless of system locale. + ## MCP Client Issues ### `mcp_servers` shows no configured servers @@ -278,6 +310,16 @@ Doctor rebuilds `STATE.md` from plan and roadmap files on disk and fixes detecte - **Forensics:** `/gsd forensics` for structured post-mortem analysis of auto-mode failures - **Session logs:** `.gsd/activity/` contains JSONL session dumps for crash forensics +## iTerm2-Specific Issues + +### Ctrl+Alt shortcuts trigger the wrong action (e.g., Ctrl+Alt+G opens external editor instead of GSD dashboard) + +**Symptoms:** Pressing Ctrl+Alt+G opens the external editor prompt (Ctrl+G) instead of the GSD dashboard. Other Ctrl+Alt shortcuts behave as their Ctrl-only counterparts. + +**Cause:** iTerm2's default Left Option Key setting is "Normal", which swallows the Alt modifier for Ctrl+Alt key combinations. The terminal receives only the Ctrl key, so Ctrl+Alt+G arrives as Ctrl+G. + +**Fix:** In iTerm2, go to **Profiles → Keys → General** and set **Left Option Key** to **Esc+**. 
This makes Alt/Option send an escape prefix that terminal applications can detect, enabling Ctrl+Alt shortcuts to work correctly. + ## Windows-Specific Issues ### LSP returns ENOENT on Windows (MSYS2/Git Bash) diff --git a/docs/web-interface.md b/docs/web-interface.md index ab2ee0ad1..4899a0280 100644 --- a/docs/web-interface.md +++ b/docs/web-interface.md @@ -7,11 +7,23 @@ GSD includes a browser-based web interface for project management, real-time pro ## Quick Start ```bash -pi --web +gsd --web ``` This starts a local web server and opens the GSD dashboard in your default browser. +### CLI Flags (v2.42.0) + +```bash +gsd --web --host 0.0.0.0 --port 8080 --allowed-origins "https://example.com" +``` + +| Flag | Default | Description | +|------|---------|-------------| +| `--host` | `localhost` | Bind address for the web server | +| `--port` | `3000` | Port for the web server | +| `--allowed-origins` | (none) | Comma-separated list of allowed CORS origins | + ## Features - **Project management** — view milestones, slices, and tasks in a visual dashboard @@ -31,7 +43,7 @@ Key components: ## Configuration -The web server binds to `localhost` by default. No additional configuration is required. +The web server binds to `localhost:3000` by default. Use `--host`, `--port`, and `--allowed-origins` to override (see CLI Flags above). ### Environment Variables @@ -39,6 +51,14 @@ The web server binds to `localhost` by default. No additional configuration is r |----------|-------------| | `GSD_WEB_PROJECT_CWD` | Default project path when `?project=` is not specified | +## Node v24 Compatibility + +Node v24 introduced breaking changes to type stripping that caused `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING` on web boot. This is fixed in v2.42.0+ (#1864). If you encounter this error, upgrade GSD. + +## Auth Token Persistence + +As of v2.42.0, the web UI persists the auth token in `sessionStorage` so it survives page refreshes (#1877). Previously, refreshing the page required re-authentication. + ## Platform Notes - **Windows**: The web build is skipped on Windows due to Next.js webpack EPERM issues with system directories. The CLI remains fully functional. diff --git a/docs/what-is-pi/18-quick-reference-commands-shortcuts.md b/docs/what-is-pi/18-quick-reference-commands-shortcuts.md index fa6b09ad0..8b195117a 100644 --- a/docs/what-is-pi/18-quick-reference-commands-shortcuts.md +++ b/docs/what-is-pi/18-quick-reference-commands-shortcuts.md @@ -40,6 +40,8 @@ | Alt+Enter (during streaming) | Queue follow-up message | | Alt+Up | Retrieve queued messages | +> **iTerm2 users:** Ctrl+Alt shortcuts (e.g., Ctrl+Alt+G for the GSD dashboard) require Left Option Key set to "Esc+" in Profiles → Keys → General. The default "Normal" setting swallows the Alt modifier. + ### CLI ```bash diff --git a/native/crates/engine/src/glob.rs b/native/crates/engine/src/glob.rs index ed17b5b3c..61be0e1de 100644 --- a/native/crates/engine/src/glob.rs +++ b/native/crates/engine/src/glob.rs @@ -254,7 +254,7 @@ pub fn glob( let ct = task::CancelToken::new(timeout_ms); task::blocking("glob", ct, move |ct| { - run_glob( + let result = run_glob( GlobConfig { root: fs_cache::resolve_search_path(&path)?, include_hidden: hidden.unwrap_or(false), @@ -270,6 +270,10 @@ pub fn glob( }, on_match.as_ref(), ct, - ) + ); + // Explicitly drop the ThreadsafeFunction to release the N-API reference + // immediately rather than relying on implicit drop ordering. 
+ drop(on_match); + result }) } diff --git a/native/crates/engine/src/image.rs b/native/crates/engine/src/image.rs index 22969ef30..7481e9f7e 100644 --- a/native/crates/engine/src/image.rs +++ b/native/crates/engine/src/image.rs @@ -103,31 +103,42 @@ fn decode_image_from_bytes(bytes: &[u8]) -> Result { .map_err(|e| Error::from_reason(format!("Failed to decode image: {e}"))) } +/// Compute a capacity hint for the encode buffer using checked arithmetic. +/// +/// Returns an error instead of panicking when `w * h * bytes_per_pixel` +/// overflows `usize`. +fn encode_capacity(w: u32, h: u32, bytes_per_pixel: usize) -> Result { + (w as usize) + .checked_mul(h as usize) + .and_then(|wh| wh.checked_mul(bytes_per_pixel)) + .ok_or_else(|| Error::from_reason("Image dimensions too large for encode buffer")) +} + fn encode_image(img: &DynamicImage, format: u8, quality: u8) -> Result> { let (w, h) = (img.width(), img.height()); match format { 0 => { - let mut buffer = Vec::with_capacity((w * h * 4) as usize); + let mut buffer = Vec::with_capacity(encode_capacity(w, h, 4)?); img.write_to(&mut Cursor::new(&mut buffer), ImageFormat::Png) .map_err(|e| Error::from_reason(format!("Failed to encode PNG: {e}")))?; Ok(buffer) }, 1 => { - let mut buffer = Vec::with_capacity((w * h * 3) as usize); + let mut buffer = Vec::with_capacity(encode_capacity(w, h, 3)?); let encoder = JpegEncoder::new_with_quality(&mut buffer, quality); img.write_with_encoder(encoder) .map_err(|e| Error::from_reason(format!("Failed to encode JPEG: {e}")))?; Ok(buffer) }, 2 => { - let mut buffer = Vec::with_capacity((w * h * 4) as usize); + let mut buffer = Vec::with_capacity(encode_capacity(w, h, 4)?); let encoder = WebPEncoder::new_lossless(&mut buffer); img.write_with_encoder(encoder) .map_err(|e| Error::from_reason(format!("Failed to encode WebP: {e}")))?; Ok(buffer) }, 3 => { - let mut buffer = Vec::with_capacity((w * h) as usize); + let mut buffer = Vec::with_capacity(encode_capacity(w, h, 1)?); img.write_to(&mut Cursor::new(&mut buffer), ImageFormat::Gif) .map_err(|e| Error::from_reason(format!("Failed to encode GIF: {e}")))?; Ok(buffer) diff --git a/native/crates/engine/src/ttsr.rs b/native/crates/engine/src/ttsr.rs index 571105936..7a513c7c9 100644 --- a/native/crates/engine/src/ttsr.rs +++ b/native/crates/engine/src/ttsr.rs @@ -34,6 +34,15 @@ pub struct NapiTtsrRuleInput { pub conditions: Vec, } +/// Maximum number of live handles allowed before we refuse to allocate more. +/// Prevents unbounded memory growth if JS callers forget to free handles. +const MAX_LIVE_HANDLES: usize = 10_000; + +/// Lock the global STORE, recovering gracefully from mutex poisoning. +fn lock_store() -> std::sync::MutexGuard<'static, HashMap> { + STORE.lock().unwrap_or_else(|e| e.into_inner()) +} + /// Compile a set of TTSR rules into an optimized regex engine. /// /// Returns an opaque numeric handle. Each rule has one or more regex condition @@ -69,10 +78,13 @@ pub fn ttsr_compile_rules(rules: Vec) -> Result { mappings, }; - STORE - .lock() - .map_err(|e| Error::from_reason(format!("Lock poisoned: {e}")))? - .insert(handle, compiled); + let mut store = lock_store(); + if store.len() >= MAX_LIVE_HANDLES { + return Err(Error::from_reason(format!( + "TTSR handle limit reached ({MAX_LIVE_HANDLES}). Free unused handles before compiling more rules." + ))); + } + store.insert(handle, compiled); // Return as f64 since napi BigInt interop is awkward; handles won't exceed 2^53. 
Ok(handle as f64) @@ -86,9 +98,13 @@ pub fn ttsr_compile_rules(rules: Vec) -> Result { pub fn ttsr_check_buffer(handle: f64, buffer: String) -> Result> { let handle_key = handle as u64; - let store = STORE - .lock() - .map_err(|e| Error::from_reason(format!("Lock poisoned: {e}")))?; + // Bounds-check: reject handles that were never allocated. + let upper_bound = NEXT_HANDLE.load(Ordering::Relaxed); + if handle_key == 0 || handle_key >= upper_bound { + return Err(Error::from_reason(format!("Invalid TTSR handle: {handle}"))); + } + + let store = lock_store(); let compiled = store .get(&handle_key) @@ -114,11 +130,14 @@ pub fn ttsr_check_buffer(handle: f64, buffer: String) -> Result> { #[napi(js_name = "ttsrFreeRules")] pub fn ttsr_free_rules(handle: f64) -> Result<()> { let handle_key = handle as u64; - - STORE - .lock() - .map_err(|e| Error::from_reason(format!("Lock poisoned: {e}")))? - .remove(&handle_key); - + lock_store().remove(&handle_key); Ok(()) } + +/// Free all compiled TTSR rule sets, releasing all memory. +/// +/// Useful for process cleanup or tests that need a fresh state. +#[napi(js_name = "ttsrClearAll")] +pub fn ttsr_clear_all() { + lock_store().clear(); +} diff --git a/packages/pi-ai/src/models.custom.ts b/packages/pi-ai/src/models.custom.ts new file mode 100644 index 000000000..5dd136ac0 --- /dev/null +++ b/packages/pi-ai/src/models.custom.ts @@ -0,0 +1,172 @@ +// Manually-maintained model definitions for providers NOT tracked by models.dev. +// +// The auto-generated file (models.generated.ts) is rebuilt from the models.dev +// third-party catalog. Providers that use proprietary endpoints and are not +// listed on models.dev must be defined here so they survive regeneration. +// +// See: https://github.com/gsd-build/gsd-2/issues/2339 +// +// To add a custom provider: +// 1. Add its model definitions below following the existing pattern. +// 2. Add its API key mapping to env-api-keys.ts. +// 3. Add its provider name to KnownProvider in types.ts (if not already there). + +import type { Model } from "./types.js"; + +export const CUSTOM_MODELS = { + // ─── Alibaba Coding Plan ───────────────────────────────────────────── + // Direct Alibaba DashScope Coding Plan endpoint (OpenAI-compatible). + // NOT the same as alibaba/* models on OpenRouter — different endpoint & auth. 
+ // Original PR: #295 | Fixes: #1003, #1055, #1057 + "alibaba-coding-plan": { + "qwen3.5-plus": { + id: "qwen3.5-plus", + name: "Qwen3.5 Plus", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 983616, + maxTokens: 65536, + compat: { thinkingFormat: "qwen", supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "qwen3-max-2026-01-23": { + id: "qwen3-max-2026-01-23", + name: "Qwen3 Max 2026-01-23", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 258048, + maxTokens: 32768, + compat: { thinkingFormat: "qwen", supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "qwen3-coder-next": { + id: "qwen3-coder-next", + name: "Qwen3 Coder Next", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: false, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 204800, + maxTokens: 65536, + compat: { supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "qwen3-coder-plus": { + id: "qwen3-coder-plus", + name: "Qwen3 Coder Plus", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: false, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 997952, + maxTokens: 65536, + compat: { supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "MiniMax-M2.5": { + id: "MiniMax-M2.5", + name: "MiniMax M2.5", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 196608, + maxTokens: 65536, + compat: { + supportsStore: false, + supportsDeveloperRole: false, + supportsReasoningEffort: true, + maxTokensField: "max_tokens", + }, + } satisfies Model<"openai-completions">, + "glm-5": { + id: "glm-5", + name: "GLM-5", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 202752, + maxTokens: 16384, + compat: { thinkingFormat: "qwen", supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "glm-4.7": { + id: "glm-4.7", + name: "GLM-4.7", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 169984, + maxTokens: 16384, + compat: { thinkingFormat: "qwen", supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + "kimi-k2.5": { + id: "kimi-k2.5", + name: "Kimi K2.5", + api: "openai-completions", + provider: "alibaba-coding-plan", + baseUrl: "https://coding-intl.dashscope.aliyuncs.com/v1", + reasoning: true, + input: ["text"], + cost: { + input: 0, 
+ output: 0, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 258048, + maxTokens: 32768, + compat: { thinkingFormat: "zai", supportsDeveloperRole: false }, + } satisfies Model<"openai-completions">, + }, +} as const; diff --git a/packages/pi-ai/src/models.test.ts b/packages/pi-ai/src/models.test.ts new file mode 100644 index 000000000..a98c32b40 --- /dev/null +++ b/packages/pi-ai/src/models.test.ts @@ -0,0 +1,85 @@ +import { describe, it } from "node:test"; +import assert from "node:assert/strict"; +import { getProviders, getModels, getModel } from "./models.js"; + +// ═══════════════════════════════════════════════════════════════════════════ +// Custom provider preservation (regression: #2339) +// +// Custom providers (like alibaba-coding-plan) are manually maintained and +// NOT sourced from models.dev. They must survive models.generated.ts +// regeneration by living in models.custom.ts. +// ═══════════════════════════════════════════════════════════════════════════ + +describe("model registry — custom providers", () => { + it("alibaba-coding-plan is a registered provider", () => { + const providers = getProviders(); + assert.ok( + providers.includes("alibaba-coding-plan"), + `Expected "alibaba-coding-plan" in providers, got: ${providers.join(", ")}`, + ); + }); + + it("alibaba-coding-plan has all expected models", () => { + const models = getModels("alibaba-coding-plan"); + const ids = models.map((m) => m.id).sort(); + const expected = [ + "MiniMax-M2.5", + "glm-4.7", + "glm-5", + "kimi-k2.5", + "qwen3-coder-next", + "qwen3-coder-plus", + "qwen3-max-2026-01-23", + "qwen3.5-plus", + ]; + assert.deepEqual(ids, expected); + }); + + it("alibaba-coding-plan models use the correct base URL", () => { + const models = getModels("alibaba-coding-plan"); + for (const model of models) { + assert.equal( + model.baseUrl, + "https://coding-intl.dashscope.aliyuncs.com/v1", + `Model ${model.id} has wrong baseUrl: ${model.baseUrl}`, + ); + } + }); + + it("alibaba-coding-plan models use openai-completions API", () => { + const models = getModels("alibaba-coding-plan"); + for (const model of models) { + assert.equal(model.api, "openai-completions", `Model ${model.id} has wrong api: ${model.api}`); + } + }); + + it("alibaba-coding-plan models have provider set correctly", () => { + const models = getModels("alibaba-coding-plan"); + for (const model of models) { + assert.equal( + model.provider, + "alibaba-coding-plan", + `Model ${model.id} has wrong provider: ${model.provider}`, + ); + } + }); + + it("getModel retrieves alibaba-coding-plan models by provider+id", () => { + // Use type assertion to test runtime behavior — alibaba-coding-plan may come + // from custom models rather than the generated file, so the narrow + // GeneratedProvider type doesn't include it until models.custom.ts is merged. 
+ const model = getModel("alibaba-coding-plan" as any, "qwen3.5-plus" as any); + assert.ok(model, "Expected getModel to return a model for alibaba-coding-plan/qwen3.5-plus"); + assert.equal(model.id, "qwen3.5-plus"); + assert.equal(model.provider, "alibaba-coding-plan"); + }); +}); + +describe("model registry — custom models do not collide with generated models", () => { + it("generated providers still exist alongside custom providers", () => { + const providers = getProviders(); + // Spot-check a few generated providers + assert.ok(providers.includes("openai"), "openai should be in providers"); + assert.ok(providers.includes("anthropic"), "anthropic should be in providers"); + }); +}); diff --git a/packages/pi-ai/src/models.ts b/packages/pi-ai/src/models.ts index 8a4805ac1..ee488fbec 100644 --- a/packages/pi-ai/src/models.ts +++ b/packages/pi-ai/src/models.ts @@ -1,9 +1,10 @@ import { MODELS } from "./models.generated.js"; +import { CUSTOM_MODELS } from "./models.custom.js"; import type { Api, KnownProvider, Model, Usage } from "./types.js"; const modelRegistry: Map>> = new Map(); -// Initialize registry from MODELS on module load +// Initialize registry from auto-generated MODELS (models.dev catalog) for (const [provider, models] of Object.entries(MODELS)) { const providerModels = new Map>(); for (const [id, model] of Object.entries(models)) { @@ -12,6 +13,21 @@ for (const [provider, models] of Object.entries(MODELS)) { modelRegistry.set(provider, providerModels); } +// Merge manually-maintained custom providers that are NOT in models.dev. +// Custom models are additive — they never overwrite generated entries. +// See: https://github.com/gsd-build/gsd-2/issues/2339 +for (const [provider, models] of Object.entries(CUSTOM_MODELS)) { + if (!modelRegistry.has(provider)) { + modelRegistry.set(provider, new Map>()); + } + const providerModels = modelRegistry.get(provider)!; + for (const [id, model] of Object.entries(models)) { + if (!providerModels.has(id)) { + providerModels.set(id, model as Model); + } + } +} + /** Providers that have entries in the generated MODELS constant */ type GeneratedProvider = keyof typeof MODELS & KnownProvider; diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json index 4ab8018f1..3006b9a1c 100644 --- a/packages/pi-coding-agent/package.json +++ b/packages/pi-coding-agent/package.json @@ -1,6 +1,6 @@ { "name": "@gsd/pi-coding-agent", - "version": "2.42.0", + "version": "2.43.0", "description": "Coding agent CLI (vendored from pi-mono)", "type": "module", "piConfig": { diff --git a/packages/pi-coding-agent/src/core/agent-session.ts b/packages/pi-coding-agent/src/core/agent-session.ts index 03389954f..c300fc20f 100644 --- a/packages/pi-coding-agent/src/core/agent-session.ts +++ b/packages/pi-coding-agent/src/core/agent-session.ts @@ -255,6 +255,10 @@ export class AgentSession { private _cumulativeOutputTokens = 0; private _cumulativeToolCalls = 0; + /** Cost of the most recent assistant response (for per-prompt display). */ + private _lastTurnCost = 0; + + // Bash execution state private _bashAbortController: AbortController | undefined = undefined; private _pendingBashMessages: BashExecutionMessage[] = []; @@ -454,6 +458,7 @@ export class AgentSession { // Accumulate session stats that survive compaction (#1423) const assistantMsg = event.message as AssistantMessage; + this._lastTurnCost = assistantMsg.usage?.cost?.total ?? 0; this._cumulativeCost += assistantMsg.usage?.cost?.total ?? 
0; this._cumulativeInputTokens += assistantMsg.usage?.input ?? 0; this._cumulativeOutputTokens += assistantMsg.usage?.output ?? 0; @@ -687,6 +692,8 @@ export class AgentSession { * Call this when completely done with the session. */ dispose(): void { + this._extensionErrorUnsubscriber?.(); + this._extensionErrorUnsubscriber = undefined; this._disconnectFromAgent(); this._eventListeners = []; } @@ -1928,7 +1935,11 @@ export class AgentSession { runner.setUIContext(this._extensionUIContext); runner.bindCommandContext(this._extensionCommandContextActions); - this._extensionErrorUnsubscriber?.(); + try { + this._extensionErrorUnsubscriber?.(); + } catch { + // Ignore errors from previous unsubscriber + } this._extensionErrorUnsubscriber = this._extensionErrorListener ? runner.onError(this._extensionErrorListener) : undefined; @@ -2774,6 +2785,14 @@ export class AgentSession { }; } + /** + * Get the cost of the most recent assistant response. + * Returns 0 if no assistant message has been received yet. + */ + getLastTurnCost(): number { + return this._lastTurnCost; + } + getContextUsage(): ContextUsage | undefined { const model = this.model; if (!model) return undefined; diff --git a/packages/pi-coding-agent/src/core/auth-storage.test.ts b/packages/pi-coding-agent/src/core/auth-storage.test.ts index f91947ca9..74020a4ec 100644 --- a/packages/pi-coding-agent/src/core/auth-storage.test.ts +++ b/packages/pi-coding-agent/src/core/auth-storage.test.ts @@ -263,6 +263,74 @@ describe("AuthStorage — areAllCredentialsBackedOff", () => { }); }); +// ─── mismatched oauth credential for non-OAuth provider (#2083) ─────────────── + +describe("AuthStorage — oauth credential for non-OAuth provider (#2083)", () => { + it("returns undefined when openrouter has type:oauth (no registered OAuth provider)", async () => { + // Simulates the bug: OpenRouter credential stored as type:"oauth" + // but OpenRouter is not a registered OAuth provider. + const storage = inMemory({ + openrouter: { + type: "oauth", + access_token: "sk-or-v1-fake", + refresh_token: "rt-fake", + expires: Date.now() + 3_600_000, + }, + }); + + // Before the fix, getApiKey returns undefined because + // resolveCredentialApiKey calls getOAuthProvider("openrouter") → null → undefined. + // The key in the oauth credential is never extracted. + const key = await storage.getApiKey("openrouter"); + // After the fix, the oauth credential with an unrecognised provider + // should be skipped, and getApiKey should fall through to env / fallback. 
+ assert.equal(key, undefined); + }); + + it("falls through to env var when openrouter has type:oauth credential", async () => { + const storage = inMemory({ + openrouter: { + type: "oauth", + access_token: "sk-or-v1-fake", + refresh_token: "rt-fake", + expires: Date.now() + 3_600_000, + }, + }); + + // Simulate OPENROUTER_API_KEY being set via env + const origEnv = process.env.OPENROUTER_API_KEY; + try { + process.env.OPENROUTER_API_KEY = "sk-or-v1-env-key"; + const key = await storage.getApiKey("openrouter"); + assert.equal(key, "sk-or-v1-env-key"); + } finally { + if (origEnv === undefined) { + delete process.env.OPENROUTER_API_KEY; + } else { + process.env.OPENROUTER_API_KEY = origEnv; + } + } + }); + + it("falls through to fallback resolver when openrouter has type:oauth credential", async () => { + const storage = inMemory({ + openrouter: { + type: "oauth", + access_token: "sk-or-v1-fake", + refresh_token: "rt-fake", + expires: Date.now() + 3_600_000, + }, + }); + + storage.setFallbackResolver((provider) => + provider === "openrouter" ? "sk-or-v1-fallback" : undefined, + ); + + const key = await storage.getApiKey("openrouter"); + assert.equal(key, "sk-or-v1-fallback"); + }); +}); + // ─── getAll truncation ──────────────────────────────────────────────────────── describe("AuthStorage — getAll()", () => { diff --git a/packages/pi-coding-agent/src/core/auth-storage.ts b/packages/pi-coding-agent/src/core/auth-storage.ts index c632090a7..5ae286177 100644 --- a/packages/pi-coding-agent/src/core/auth-storage.ts +++ b/packages/pi-coding-agent/src/core/auth-storage.ts @@ -756,9 +756,12 @@ export class AuthStorage { if (credentials.length > 0) { const index = this.selectCredentialIndex(providerId, credentials, sessionId); if (index >= 0) { - return this.resolveCredentialApiKey(providerId, credentials[index]); + const resolved = await this.resolveCredentialApiKey(providerId, credentials[index]); + if (resolved) return resolved; + // Credential unresolvable (e.g. type:"oauth" for a non-OAuth provider) — + // fall through to env / fallback instead of returning undefined (#2083) } - // All credentials backed off - fall through to env/fallback + // All credentials backed off or unresolvable - fall through to env/fallback } // Fall back to environment variable diff --git a/packages/pi-coding-agent/src/core/extensions/loader.ts b/packages/pi-coding-agent/src/core/extensions/loader.ts index 88272e87b..396ba9e9a 100644 --- a/packages/pi-coding-agent/src/core/extensions/loader.ts +++ b/packages/pi-coding-agent/src/core/extensions/loader.ts @@ -569,6 +569,24 @@ function createExtensionAPI( } async function loadExtensionModule(extensionPath: string) { + // Pre-compiled extension loading: if the source is .ts and a sibling .js + // file exists with matching or newer mtime, use native import() to skip + // jiti JIT compilation entirely. This is the biggest startup win for + // bundled extensions that have already been built. + if (extensionPath.endsWith(".ts")) { + const jsPath = extensionPath.replace(/\.ts$/, ".js"); + try { + const [tsStat, jsStat] = [fs.statSync(extensionPath), fs.statSync(jsPath)]; + if (jsStat.mtimeMs >= tsStat.mtimeMs) { + const module = await import(jsPath); + const factory = (module.default ?? module) as ExtensionFactory; + return typeof factory !== "function" ? 
undefined : factory; + } + } catch { + // .js file doesn't exist or stat failed — fall through to jiti + } + } + const jiti = createJiti(import.meta.url, { moduleCache: false, ...getJitiOptions(), diff --git a/packages/pi-coding-agent/src/core/lsp/client.ts b/packages/pi-coding-agent/src/core/lsp/client.ts index 930dc8374..400b2beb0 100644 --- a/packages/pi-coding-agent/src/core/lsp/client.ts +++ b/packages/pi-coding-agent/src/core/lsp/client.ts @@ -24,11 +24,25 @@ const clients = new Map(); const clientLocks = new Map>(); const fileOperationLocks = new Map>(); +/** Track stream listeners per client so they can be removed on shutdown. */ +interface StreamHandlers { + stdoutData?: (chunk: Buffer) => void; + stdoutEnd?: () => void; + stdoutError?: () => void; + stderrData?: (chunk: Buffer) => void; + stderrEnd?: () => void; + stderrError?: () => void; +} +const clientStreamHandlers = new Map(); + // Idle timeout configuration (disabled by default) let idleTimeoutMs: number | null = null; let idleCheckInterval: ReturnType | null = null; const IDLE_CHECK_INTERVAL_MS = 60 * 1000; +/** Maximum allowed size for the message buffer (10 MB). */ +const MAX_MESSAGE_BUFFER_SIZE = 10 * 1024 * 1024; + /** * Configure the idle timeout for LSP clients. */ @@ -52,6 +66,10 @@ function startIdleChecker(): void { shutdownClient(key); } } + // Stop the checker if there are no more clients to monitor + if (clients.size === 0) { + stopIdleChecker(); + } }, IDLE_CHECK_INTERVAL_MS); } @@ -250,8 +268,21 @@ async function startMessageReader(client: LspClient): Promise { } return new Promise((resolve) => { - stdout.on("data", async (chunk: Buffer) => { + const handlers = clientStreamHandlers.get(client.name) ?? {}; + + handlers.stdoutData = async (chunk: Buffer) => { const currentBuffer: Buffer = Buffer.concat([client.messageBuffer, chunk]); + + if (currentBuffer.length > MAX_MESSAGE_BUFFER_SIZE) { + if (process.env.DEBUG) { + console.error( + `[lsp] Message buffer exceeded ${MAX_MESSAGE_BUFFER_SIZE} bytes (${currentBuffer.length}), discarding`, + ); + } + client.messageBuffer = Buffer.alloc(0); + return; + } + client.messageBuffer = currentBuffer; let workingBuffer = currentBuffer; @@ -289,17 +320,22 @@ async function startMessageReader(client: LspClient): Promise { } client.messageBuffer = workingBuffer; - }); + }; + stdout.on("data", handlers.stdoutData); - stdout.on("end", () => { + handlers.stdoutEnd = () => { client.isReading = false; resolve(); - }); + }; + stdout.on("end", handlers.stdoutEnd); - stdout.on("error", () => { + handlers.stdoutError = () => { client.isReading = false; resolve(); - }); + }; + stdout.on("error", handlers.stdoutError); + + clientStreamHandlers.set(client.name, handlers); }); } @@ -384,21 +420,28 @@ async function startStderrReader(client: LspClient): Promise { if (!stderr) return; return new Promise((resolve) => { - stderr.on("data", (chunk: Buffer) => { + const handlers = clientStreamHandlers.get(client.name) ?? 
{}; + + handlers.stderrData = (chunk: Buffer) => { const text = chunk.toString("utf-8"); client.stderrBuffer += text; if (client.stderrBuffer.length > 4096) { client.stderrBuffer = client.stderrBuffer.slice(-4096); } - }); + }; + stderr.on("data", handlers.stderrData); - stderr.on("end", () => { + handlers.stderrEnd = () => { resolve(); - }); + }; + stderr.on("end", handlers.stderrEnd); - stderr.on("error", () => { + handlers.stderrError = () => { resolve(); - }); + }; + stderr.on("error", handlers.stderrError); + + clientStreamHandlers.set(client.name, handlers); }); } @@ -688,6 +731,23 @@ export function notifyFileChanged(filePath: string): void { } } +/** + * Remove stdout/stderr stream listeners for a client to prevent leaks. + */ +function removeStreamHandlers(client: LspClient): void { + const handlers = clientStreamHandlers.get(client.name); + if (!handlers) return; + + if (handlers.stdoutData) client.proc.stdout?.removeListener("data", handlers.stdoutData); + if (handlers.stdoutEnd) client.proc.stdout?.removeListener("end", handlers.stdoutEnd); + if (handlers.stdoutError) client.proc.stdout?.removeListener("error", handlers.stdoutError); + if (handlers.stderrData) client.proc.stderr?.removeListener("data", handlers.stderrData); + if (handlers.stderrEnd) client.proc.stderr?.removeListener("end", handlers.stderrEnd); + if (handlers.stderrError) client.proc.stderr?.removeListener("error", handlers.stderrError); + + clientStreamHandlers.delete(client.name); +} + /** * Shutdown a specific client by key. */ @@ -702,12 +762,23 @@ function shutdownClient(key: string): void { sendRequest(client, "shutdown", null).catch(() => {}); + // Remove stream listeners before killing the process + removeStreamHandlers(client); + try { killProcessTree(client.proc.pid); } catch { client.proc.kill(); } clients.delete(key); + clientLocks.delete(key); + + // Clean up any file operation locks associated with this client + for (const lockKey of Array.from(fileOperationLocks.keys())) { + if (lockKey.startsWith(`${key}:`)) { + fileOperationLocks.delete(lockKey); + } + } } // ============================================================================= @@ -822,6 +893,9 @@ async function sendNotification(client: LspClient, method: string, params: unkno function shutdownAll(): void { const clientsToShutdown = Array.from(clients.values()); clients.clear(); + clientLocks.clear(); + fileOperationLocks.clear(); + stopIdleChecker(); const err = new Error("LSP client shutdown"); for (const client of clientsToShutdown) { @@ -831,6 +905,9 @@ function shutdownAll(): void { pending.reject(err); } + // Remove stream listeners before killing the process + removeStreamHandlers(client); + void (async () => { const timeout = new Promise(resolve => setTimeout(resolve, 5_000)); const result = sendRequest(client, "shutdown", null).catch(() => {}); @@ -864,14 +941,28 @@ export function getActiveClients(): LspServerStatus[] { // Process Cleanup // ============================================================================= +const _beforeExitHandler = () => shutdownAll(); +const _sigintHandler = () => { + shutdownAll(); + process.exit(0); +}; +const _sigtermHandler = () => { + shutdownAll(); + process.exit(0); +}; + if (typeof process !== "undefined") { - process.on("beforeExit", shutdownAll); - process.on("SIGINT", () => { - shutdownAll(); - process.exit(0); - }); - process.on("SIGTERM", () => { - shutdownAll(); - process.exit(0); - }); + process.on("beforeExit", _beforeExitHandler); + process.on("SIGINT", _sigintHandler); + 
process.on("SIGTERM", _sigtermHandler); +} + +/** + * Remove process-level signal handlers registered at module load. + * Call this during graceful teardown to prevent leaked listeners. + */ +export function removeProcessHandlers(): void { + process.off("beforeExit", _beforeExitHandler); + process.off("SIGINT", _sigintHandler); + process.off("SIGTERM", _sigtermHandler); } diff --git a/packages/pi-coding-agent/src/core/package-manager.ts b/packages/pi-coding-agent/src/core/package-manager.ts index 44209e04f..d29c44ca5 100644 --- a/packages/pi-coding-agent/src/core/package-manager.ts +++ b/packages/pi-coding-agent/src/core/package-manager.ts @@ -1562,6 +1562,26 @@ export class DefaultPackageManager implements PackageManager { } } + /** + * Batch-discover which resource subdirectories exist under a parent dir. + * A single readdirSync replaces 4 separate existsSync probes, reducing + * syscalls during startup. + */ + private discoverResourceSubdirs(baseDir: string): Set { + try { + const entries = readdirSync(baseDir, { withFileTypes: true }); + const names = new Set(); + for (const e of entries) { + if (e.isDirectory() || e.isSymbolicLink()) { + names.add(e.name); + } + } + return names; + } catch { + return new Set(); + } + } + private addAutoDiscoveredResources( accumulator: ResourceAccumulator, globalSettings: ReturnType, @@ -1595,6 +1615,11 @@ export class DefaultPackageManager implements PackageManager { themes: (projectSettings.themes ?? []) as string[], }; + // Batch directory discovery: one readdir of each parent replaces up to + // 4 separate existsSync calls per base directory, cutting syscalls. + const projectSubdirs = this.discoverResourceSubdirs(projectBaseDir); + const userSubdirs = this.discoverResourceSubdirs(globalBaseDir); + const userDirs = { extensions: join(globalBaseDir, "extensions"), skills: join(globalBaseDir, "skills"), @@ -1626,66 +1651,82 @@ export class DefaultPackageManager implements PackageManager { } }; - addResources( - "extensions", - collectAutoExtensionEntries(projectDirs.extensions), - projectMetadata, - projectOverrides.extensions, - projectBaseDir, - ); - addResources( - "skills", - [ - ...collectAutoSkillEntries(projectDirs.skills), + // Project resources — skip collect calls when the parent readdir shows + // the subdirectory doesn't exist (avoids redundant existsSync + readdirSync). + if (projectSubdirs.has("extensions")) { + addResources( + "extensions", + collectAutoExtensionEntries(projectDirs.extensions), + projectMetadata, + projectOverrides.extensions, + projectBaseDir, + ); + } + { + const skillEntries = [ + ...(projectSubdirs.has("skills") ? 
collectAutoSkillEntries(projectDirs.skills) : []), ...projectAgentsSkillDirs.flatMap((dir) => collectAutoSkillEntries(dir)), - ], - projectMetadata, - projectOverrides.skills, - projectBaseDir, - ); - addResources( - "prompts", - collectAutoPromptEntries(projectDirs.prompts), - projectMetadata, - projectOverrides.prompts, - projectBaseDir, - ); - addResources( - "themes", - collectAutoThemeEntries(projectDirs.themes), - projectMetadata, - projectOverrides.themes, - projectBaseDir, - ); + ]; + if (skillEntries.length > 0) { + addResources("skills", skillEntries, projectMetadata, projectOverrides.skills, projectBaseDir); + } + } + if (projectSubdirs.has("prompts")) { + addResources( + "prompts", + collectAutoPromptEntries(projectDirs.prompts), + projectMetadata, + projectOverrides.prompts, + projectBaseDir, + ); + } + if (projectSubdirs.has("themes")) { + addResources( + "themes", + collectAutoThemeEntries(projectDirs.themes), + projectMetadata, + projectOverrides.themes, + projectBaseDir, + ); + } - addResources( - "extensions", - collectAutoExtensionEntries(userDirs.extensions), - userMetadata, - userOverrides.extensions, - globalBaseDir, - ); - addResources( - "skills", - [...collectAutoSkillEntries(userDirs.skills), ...collectAutoSkillEntries(userAgentsSkillsDir)], - userMetadata, - userOverrides.skills, - globalBaseDir, - ); - addResources( - "prompts", - collectAutoPromptEntries(userDirs.prompts), - userMetadata, - userOverrides.prompts, - globalBaseDir, - ); - addResources( - "themes", - collectAutoThemeEntries(userDirs.themes), - userMetadata, - userOverrides.themes, - globalBaseDir, - ); + // User (global) resources + if (userSubdirs.has("extensions")) { + addResources( + "extensions", + collectAutoExtensionEntries(userDirs.extensions), + userMetadata, + userOverrides.extensions, + globalBaseDir, + ); + } + { + const skillEntries = [ + ...(userSubdirs.has("skills") ? collectAutoSkillEntries(userDirs.skills) : []), + ...collectAutoSkillEntries(userAgentsSkillsDir), + ]; + if (skillEntries.length > 0) { + addResources("skills", skillEntries, userMetadata, userOverrides.skills, globalBaseDir); + } + } + if (userSubdirs.has("prompts")) { + addResources( + "prompts", + collectAutoPromptEntries(userDirs.prompts), + userMetadata, + userOverrides.prompts, + globalBaseDir, + ); + } + if (userSubdirs.has("themes")) { + addResources( + "themes", + collectAutoThemeEntries(userDirs.themes), + userMetadata, + userOverrides.themes, + globalBaseDir, + ); + } } private collectFilesFromPaths(paths: string[], resourceType: ResourceType): string[] { diff --git a/packages/pi-coding-agent/src/core/resource-loader.ts b/packages/pi-coding-agent/src/core/resource-loader.ts index c8c1c048c..6eb040829 100644 --- a/packages/pi-coding-agent/src/core/resource-loader.ts +++ b/packages/pi-coding-agent/src/core/resource-loader.ts @@ -1,6 +1,6 @@ import { existsSync, readdirSync, readFileSync, statSync } from "node:fs"; import { homedir } from "node:os"; -import { join, resolve, sep } from "node:path"; +import { basename, dirname, join, resolve, sep } from "node:path"; import chalk from "chalk"; import { CONFIG_DIR_NAME, getAgentDir } from "../config.js"; import { loadThemeFromPath, type Theme } from "../modes/interactive/theme/theme.js"; @@ -127,6 +127,8 @@ export interface DefaultResourceLoaderOptions { noThemes?: boolean; systemPrompt?: string; appendSystemPrompt?: string; + /** Names of bundled extensions (used to identify built-in extensions in conflict detection). 
*/ + bundledExtensionNames?: Set; extensionsOverride?: (base: LoadExtensionsResult) => LoadExtensionsResult; skillsOverride?: (base: { skills: Skill[]; diagnostics: ResourceDiagnostic[] }) => { skills: Skill[]; @@ -164,6 +166,7 @@ export class DefaultResourceLoader implements ResourceLoader { private noThemes: boolean; private systemPromptSource?: string; private appendSystemPromptSource?: string; + private bundledExtensionNames: Set; private extensionsOverride?: (base: LoadExtensionsResult) => LoadExtensionsResult; private skillsOverride?: (base: { skills: Skill[]; diagnostics: ResourceDiagnostic[] }) => { skills: Skill[]; @@ -219,6 +222,7 @@ export class DefaultResourceLoader implements ResourceLoader { this.noThemes = options.noThemes ?? false; this.systemPromptSource = options.systemPrompt; this.appendSystemPromptSource = options.appendSystemPrompt; + this.bundledExtensionNames = options.bundledExtensionNames ?? new Set(); this.extensionsOverride = options.extensionsOverride; this.skillsOverride = options.skillsOverride; this.promptsOverride = options.promptsOverride; @@ -790,6 +794,19 @@ export class DefaultResourceLoader implements ResourceLoader { return target.startsWith(prefix); } + /** + * Extract the extension name from its path. + * For root-level files: basename without extension (e.g. "search-the-web.ts" → "search-the-web") + * For subdirectory extensions: the directory name (e.g. "/path/to/gsd/index.ts" → "gsd") + */ + private getExtensionNameFromPath(extPath: string): string { + const base = basename(extPath); + if (base === "index.js" || base === "index.ts") { + return basename(dirname(extPath)); + } + return base.replace(/\.(?:ts|js)$/, ""); + } + private detectExtensionConflicts(extensions: Extension[]): Array<{ path: string; message: string }> { const conflicts: Array<{ path: string; message: string }> = []; @@ -803,9 +820,10 @@ export class DefaultResourceLoader implements ResourceLoader { for (const toolName of ext.tools.keys()) { const existingOwner = toolOwners.get(toolName); if (existingOwner && existingOwner !== ext.path) { - // Determine if the existing owner is a built-in (not a user extension) - const isBuiltIn = !existingOwner.includes("/.gsd/agent/extensions/") && - !existingOwner.includes("/.gsd/extensions/"); + // Determine if the existing owner is a bundled extension by checking + // its name against the canonical bundled extensions list + const ownerName = this.getExtensionNameFromPath(existingOwner); + const isBuiltIn = this.bundledExtensionNames.has(ownerName); const hint = isBuiltIn ? ` (built-in tool supersedes — consider removing ${ext.path})` : ""; @@ -822,8 +840,8 @@ export class DefaultResourceLoader implements ResourceLoader { for (const commandName of ext.commands.keys()) { const existingOwner = commandOwners.get(commandName); if (existingOwner && existingOwner !== ext.path) { - const isBuiltIn = !existingOwner.includes("/.gsd/agent/extensions/") && - !existingOwner.includes("/.gsd/extensions/"); + const ownerName = this.getExtensionNameFromPath(existingOwner); + const isBuiltIn = this.bundledExtensionNames.has(ownerName); const hint = isBuiltIn ? 
` (built-in command supersedes — consider removing ${ext.path})` : ""; diff --git a/packages/pi-coding-agent/src/core/system-prompt.ts b/packages/pi-coding-agent/src/core/system-prompt.ts index 310aa9593..f837ae349 100644 --- a/packages/pi-coding-agent/src/core/system-prompt.ts +++ b/packages/pi-coding-agent/src/core/system-prompt.ts @@ -84,9 +84,9 @@ export function buildSystemPrompt(options: BuildSystemPromptOptions = {}): strin } } - // Append skills section (only if read tool is available) - const customPromptHasRead = !selectedTools || selectedTools.includes("read"); - if (customPromptHasRead && skills.length > 0) { + // Append skills section (if read or Skill tool is available) + const customPromptHasSkillAccess = !selectedTools || selectedTools.includes("read") || selectedTools.includes("Skill"); + if (customPromptHasSkillAccess && skills.length > 0) { prompt += formatSkillsForPrompt(skills); } @@ -232,8 +232,9 @@ Pi documentation (read only when the user asks about pi itself, its SDK, extensi } } - // Append skills section (only if read tool is available) - if (hasRead && skills.length > 0) { + // Append skills section (if read or Skill tool is available) + const hasSkill = tools.includes("Skill"); + if ((hasRead || hasSkill) && skills.length > 0) { prompt += formatSkillsForPrompt(skills); } diff --git a/packages/pi-coding-agent/src/modes/interactive/components/extension-editor.ts b/packages/pi-coding-agent/src/modes/interactive/components/extension-editor.ts index f0a9eae8b..0b05c3ada 100644 --- a/packages/pi-coding-agent/src/modes/interactive/components/extension-editor.ts +++ b/packages/pi-coding-agent/src/modes/interactive/components/extension-editor.ts @@ -113,6 +113,9 @@ export class ExtensionEditorComponent extends Container implements Focusable { private openExternalEditor(): void { const editorCmd = process.env.VISUAL || process.env.EDITOR; if (!editorCmd) { + // No editor configured — nothing to do. + // The main interactive-mode handler shows a warning with an iTerm2 hint; + // this component is a secondary editor so we silently bail. return; } diff --git a/packages/pi-coding-agent/src/modes/interactive/components/footer.ts b/packages/pi-coding-agent/src/modes/interactive/components/footer.ts index 5b4456baa..6a1c49d43 100644 --- a/packages/pi-coding-agent/src/modes/interactive/components/footer.ts +++ b/packages/pi-coding-agent/src/modes/interactive/components/footer.ts @@ -26,6 +26,18 @@ function formatTokens(count: number): string { return `${Math.round(count / 1000000)}M`; } +/** + * Format a cost value for compact display. + * Uses fewer decimal places for larger amounts. + * @internal Exported for testing only. + */ +export function formatPromptCost(cost: number): string { + if (cost < 0.001) return `$${cost.toFixed(4)}`; + if (cost < 0.01) return `$${cost.toFixed(3)}`; + if (cost < 1) return `$${cost.toFixed(3)}`; + return `$${cost.toFixed(2)}`; +} + /** * Footer component that shows pwd, token stats, and context usage. * Computes token/context stats from session, gets git branch and extension statuses from provider. 
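A quick spot-check of the tiers implemented by formatPromptCost above (the import path and call sites are illustrative, not part of the patch):

import { formatPromptCost } from "./footer.js"; // path assumed for illustration

formatPromptCost(0.0004); // "$0.0004"  (under $0.001: four decimals)
formatPromptCost(0.04);   // "$0.040"   (under $1: three decimals)
formatPromptCost(2.5);    // "$2.50"    ($1 and above: two decimals)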
@@ -112,6 +124,14 @@ export class FooterComponent implements Component { statsParts.push(costStr); } + // Per-prompt cost annotation (opt-in via show_token_cost preference, #1515) + if (process.env.GSD_SHOW_TOKEN_COST === "1") { + const lastTurnCost = this.session.getLastTurnCost(); + if (lastTurnCost > 0) { + statsParts.push(`(last: ${formatPromptCost(lastTurnCost)})`); + } + } + // Colorize context percentage based on usage let contextPercentStr: string; const autoIndicator = this.autoCompactEnabled ? " (auto)" : ""; diff --git a/packages/pi-coding-agent/src/modes/interactive/interactive-mode.ts b/packages/pi-coding-agent/src/modes/interactive/interactive-mode.ts index cd9550f12..2f0beb331 100644 --- a/packages/pi-coding-agent/src/modes/interactive/interactive-mode.ts +++ b/packages/pi-coding-agent/src/modes/interactive/interactive-mode.ts @@ -1519,6 +1519,13 @@ export class InteractiveMode { options: string[], opts?: ExtensionUIDialogOptions, ): Promise { + // If a previous selector is still active, dispose it before creating a + // new one. This avoids leaking the previous promise and DOM state when + // showExtensionSelector is called rapidly. + if (this.extensionSelector) { + this.hideExtensionSelector(); + } + return new Promise((resolve) => { if (opts?.signal?.aborted) { resolve(undefined); @@ -2331,18 +2338,24 @@ export class InteractiveMode { const ignoreSigint = () => {}; process.on("SIGINT", ignoreSigint); - // Set up handler to restore TUI when resumed - process.once("SIGCONT", () => { + try { + // Set up handler to restore TUI when resumed + process.once("SIGCONT", () => { + process.removeListener("SIGINT", ignoreSigint); + this.ui.start(); + this.ui.requestRender(true); + }); + + // Stop the TUI (restore terminal to normal mode) + this.ui.stop(); + + // Send SIGTSTP to process group (pid=0 means all processes in group) + process.kill(0, "SIGTSTP"); + } catch { + // If suspend fails (e.g. SIGTSTP not supported), ensure the + // SIGINT listener doesn't leak. process.removeListener("SIGINT", ignoreSigint); - this.ui.start(); - this.ui.requestRender(true); - }); - - // Stop the TUI (restore terminal to normal mode) - this.ui.stop(); - - // Send SIGTSTP to process group (pid=0 means all processes in group) - process.kill(0, "SIGTSTP"); + } } private async handleFollowUp(): Promise { @@ -2460,7 +2473,14 @@ export class InteractiveMode { // Determine editor (respect $VISUAL, then $EDITOR) const editorCmd = process.env.VISUAL || process.env.EDITOR; if (!editorCmd) { - this.showWarning("No editor configured. Set $VISUAL or $EDITOR environment variable."); + let msg = "No editor configured. Set $VISUAL or $EDITOR environment variable."; + if (process.env.TERM_PROGRAM === "iTerm.app") { + msg += + "\n\nTip: If you meant to open the GSD dashboard (Ctrl+Alt+G), set Left Option Key to" + + " \"Esc+\" in iTerm2 → Profiles → Keys. 
With the default \"Normal\" setting," + + " Ctrl+Alt+G sends Ctrl+G instead."; + } + this.showWarning(msg); return; } diff --git a/packages/pi-coding-agent/src/modes/interactive/theme/theme.ts b/packages/pi-coding-agent/src/modes/interactive/theme/theme.ts index db1a524a0..763b22734 100644 --- a/packages/pi-coding-agent/src/modes/interactive/theme/theme.ts +++ b/packages/pi-coding-agent/src/modes/interactive/theme/theme.ts @@ -663,7 +663,7 @@ function setGlobalTheme(t: Theme): void { let currentThemeName: string | undefined; let themeWatcher: fs.FSWatcher | undefined; -let onThemeChangeCallback: (() => void) | undefined; +const onThemeChangeCallbacks = new Set<() => void>(); const registeredThemes = new Map(); export function setRegisteredThemes(themes: Theme[]): void { @@ -698,9 +698,7 @@ export function setTheme(name: string, enableWatcher: boolean = false): { succes if (enableWatcher) { startThemeWatcher(); } - if (onThemeChangeCallback) { - onThemeChangeCallback(); - } + onThemeChangeCallbacks.forEach(cb => cb()); return { success: true }; } catch (error) { // Theme is invalid - fall back to dark theme @@ -718,13 +716,12 @@ export function setThemeInstance(themeInstance: Theme): void { setGlobalTheme(themeInstance); currentThemeName = ""; stopThemeWatcher(); // Can't watch a direct instance - if (onThemeChangeCallback) { - onThemeChangeCallback(); - } + onThemeChangeCallbacks.forEach(cb => cb()); } -export function onThemeChange(callback: () => void): void { - onThemeChangeCallback = callback; +export function onThemeChange(callback: () => void): () => void { + onThemeChangeCallbacks.add(callback); + return () => { onThemeChangeCallbacks.delete(callback); }; } function startThemeWatcher(): void { @@ -755,10 +752,8 @@ function startThemeWatcher(): void { try { // Reload the theme setGlobalTheme(loadTheme(currentThemeName!)); - // Notify callback (to invalidate UI) - if (onThemeChangeCallback) { - onThemeChangeCallback(); - } + // Notify callbacks (to invalidate UI) + onThemeChangeCallbacks.forEach(cb => cb()); } catch (_error) { // Ignore errors (file might be in invalid state while being edited) } @@ -773,9 +768,7 @@ function startThemeWatcher(): void { themeWatcher.close(); themeWatcher = undefined; } - if (onThemeChangeCallback) { - onThemeChangeCallback(); - } + onThemeChangeCallbacks.forEach(cb => cb()); } }, 100); } diff --git a/packages/pi-coding-agent/src/modes/print-mode.ts b/packages/pi-coding-agent/src/modes/print-mode.ts index a2557f99b..a44266450 100644 --- a/packages/pi-coding-agent/src/modes/print-mode.ts +++ b/packages/pi-coding-agent/src/modes/print-mode.ts @@ -45,52 +45,62 @@ export async function runPrintMode(session: AgentSession, options: PrintModeOpti }); // Always subscribe to enable session persistence via _handleAgentEvent - session.subscribe((event) => { + const unsubscribe = session.subscribe((event) => { // In JSON mode, output all events if (mode === "json") { console.log(JSON.stringify(event)); } }); - // Send initial message with attachments - if (initialMessage) { - await session.prompt(initialMessage, { images: initialImages }); - } + let exitCode = 0; - // Send remaining messages - for (const message of messages) { - await session.prompt(message); - } + try { + // Send initial message with attachments + if (initialMessage) { + await session.prompt(initialMessage, { images: initialImages }); + } - // In text mode, output final response - if (mode === "text") { - const state = session.state; - const lastMessage = state.messages[state.messages.length - 1]; 
+ // Send remaining messages + for (const message of messages) { + await session.prompt(message); + } - if (lastMessage?.role === "assistant") { - const assistantMsg = lastMessage as AssistantMessage; + // In text mode, output final response + if (mode === "text") { + const state = session.state; + const lastMessage = state.messages[state.messages.length - 1]; - // Check for error/aborted - if (assistantMsg.stopReason === "error" || assistantMsg.stopReason === "aborted") { - console.error(assistantMsg.errorMessage || `Request ${assistantMsg.stopReason}`); - process.exit(1); - } + if (lastMessage?.role === "assistant") { + const assistantMsg = lastMessage as AssistantMessage; - // Output text content - for (const content of assistantMsg.content) { - if (content.type === "text") { - console.log(content.text); + // Check for error/aborted + if (assistantMsg.stopReason === "error" || assistantMsg.stopReason === "aborted") { + console.error(assistantMsg.errorMessage || `Request ${assistantMsg.stopReason}`); + exitCode = 1; + } else { + // Output text content + for (const content of assistantMsg.content) { + if (content.type === "text") { + console.log(content.text); + } + } } } } + + // Ensure stdout is fully flushed before returning + // This prevents race conditions where the process exits before all output is written + await new Promise((resolve, reject) => { + process.stdout.write("", (err) => { + if (err) reject(err); + else resolve(); + }); + }); + } finally { + unsubscribe(); } - // Ensure stdout is fully flushed before returning - // This prevents race conditions where the process exits before all output is written - await new Promise((resolve, reject) => { - process.stdout.write("", (err) => { - if (err) reject(err); - else resolve(); - }); - }); + if (exitCode !== 0) { + process.exit(exitCode); + } } diff --git a/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts b/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts index a3f91ecc4..c688a049f 100644 --- a/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts +++ b/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts @@ -54,6 +54,7 @@ export type RpcEventListener = (event: AgentEvent) => void; export class RpcClient { private process: ChildProcess | null = null; private stopReadingStdout: (() => void) | null = null; + private _stderrHandler?: (data: Buffer) => void; private eventListeners: RpcEventListener[] = []; private pendingRequests: Map void; reject: (error: Error) => void }> = new Map(); @@ -90,9 +91,10 @@ export class RpcClient { }); // Collect stderr for debugging - this.process.stderr?.on("data", (data) => { + this._stderrHandler = (data: Buffer) => { this.stderr += data.toString(); - }); + }; + this.process.stderr?.on("data", this._stderrHandler); // Set up strict JSONL reader for stdout. 
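The stopReadingStdout cleanup above relies on attachJsonlLineReader returning a detach function. That helper is not part of this patch, so the following is only a hedged sketch of the shape such a strict JSONL reader typically has (names and behavior are assumptions): buffer chunks, emit complete lines, and hand back a function that removes the listener.

import type { Readable } from "node:stream";

function attachJsonlLineReaderSketch(stream: Readable, onLine: (line: string) => void): () => void {
  let buffered = "";
  const onData = (chunk: Buffer) => {
    buffered += chunk.toString("utf-8");
    let idx: number;
    while ((idx = buffered.indexOf("\n")) !== -1) {
      const line = buffered.slice(0, idx).trim();
      buffered = buffered.slice(idx + 1);
      if (line) onLine(line); // callers JSON.parse each complete line
    }
  };
  stream.on("data", onData);
  return () => stream.removeListener("data", onData);
}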
this.stopReadingStdout = attachJsonlLineReader(this.process.stdout!, (line) => { @@ -127,6 +129,10 @@ export class RpcClient { this.stopReadingStdout?.(); this.stopReadingStdout = null; + if (this._stderrHandler) { + this.process.stderr?.removeListener("data", this._stderrHandler); + this._stderrHandler = undefined; + } this.process.kill("SIGTERM"); // Wait for process to exit diff --git a/packages/pi-coding-agent/src/modes/rpc/rpc-mode.ts b/packages/pi-coding-agent/src/modes/rpc/rpc-mode.ts index e15c81ae3..fc80a9d3e 100644 --- a/packages/pi-coding-agent/src/modes/rpc/rpc-mode.ts +++ b/packages/pi-coding-agent/src/modes/rpc/rpc-mode.ts @@ -424,7 +424,7 @@ export async function runRpcMode(session: AgentSession): Promise { void extensionsReadyPromise; // Output all agent events as JSON - session.subscribe((event) => { + const unsubscribe = session.subscribe((event) => { output(event); }); @@ -730,6 +730,7 @@ export async function runRpcMode(session: AgentSession): Promise { await currentRunner.emit({ type: "session_shutdown" }); } + unsubscribe(); embeddedInteractiveMode?.stop(); detachInput(); process.stdin.pause(); diff --git a/pkg/package.json b/pkg/package.json index d31c4cf16..dce19ad64 100644 --- a/pkg/package.json +++ b/pkg/package.json @@ -1,6 +1,6 @@ { "name": "@glittercowboy/gsd", - "version": "2.42.0", + "version": "2.43.0", "piConfig": { "name": "gsd", "configDir": ".gsd" diff --git a/scripts/install-hooks.sh b/scripts/install-hooks.sh deleted file mode 100755 index 30bfd629e..000000000 --- a/scripts/install-hooks.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash -# Installs the git pre-commit hook for secret scanning. -# Safe to run multiple times — only installs if not already present. - -set -euo pipefail - -HOOK_DIR="$(git rev-parse --git-dir)/hooks" -HOOK_FILE="$HOOK_DIR/pre-commit" -MARKER="# gsd-secret-scan" - -mkdir -p "$HOOK_DIR" - -# Check if our hook is already installed -if [[ -f "$HOOK_FILE" ]] && grep -q "$MARKER" "$HOOK_FILE" 2>/dev/null; then - echo "secret-scan pre-commit hook already installed." - exit 0 -fi - -# If a pre-commit hook already exists, append; otherwise create -if [[ -f "$HOOK_FILE" ]]; then - echo "" >> "$HOOK_FILE" - echo "$MARKER" >> "$HOOK_FILE" - echo 'bash "$(git rev-parse --show-toplevel)/scripts/secret-scan.sh"' >> "$HOOK_FILE" - echo "secret-scan appended to existing pre-commit hook." -else - cat > "$HOOK_FILE" << 'EOF' -#!/usr/bin/env bash -# gsd-secret-scan -# Pre-commit hook: scan staged files for hardcoded secrets -bash "$(git rev-parse --show-toplevel)/scripts/secret-scan.sh" -EOF - chmod +x "$HOOK_FILE" - echo "secret-scan pre-commit hook installed." -fi diff --git a/scripts/watch-resources.js b/scripts/watch-resources.js index 900afae51..d0a160e26 100644 --- a/scripts/watch-resources.js +++ b/scripts/watch-resources.js @@ -37,6 +37,9 @@ process.stderr.write(`[watch-resources] Initial sync done\n`) // On Linux (Node <20.13) it throws ERR_FEATURE_UNAVAILABLE_ON_PLATFORM. // Fall back to polling on unsupported platforms. 
let timer = null +let fsWatcher = null +let pollInterval = null + const onChange = () => { if (timer) clearTimeout(timer) timer = setTimeout(() => { @@ -46,13 +49,19 @@ const onChange = () => { } try { - watch(src, { recursive: true }, onChange) + fsWatcher = watch(src, { recursive: true }, onChange) } catch { // Fallback: poll every 2s (Linux without recursive watch support) process.stderr.write(`[watch-resources] fs.watch recursive not supported, falling back to polling\n`) - setInterval(() => { + pollInterval = setInterval(() => { try { sync() } catch {} }, 2000) } +process.on('exit', () => { + if (timer) clearTimeout(timer) + if (fsWatcher) fsWatcher.close() + if (pollInterval) clearInterval(pollInterval) +}) + process.stderr.write(`[watch-resources] Watching src/resources/ → dist/resources/\n`) diff --git a/src/cli.ts b/src/cli.ts index 91c51dec8..6a7fba97a 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -29,6 +29,15 @@ import { stopWebMode } from './web-mode.js' import { getProjectSessionsDir } from './project-sessions.js' import { markStartup, printStartupTimings } from './startup-timings.js' +// --------------------------------------------------------------------------- +// V8 compile cache — Node 22+ can cache compiled bytecode across runs, +// eliminating repeated parse/compile overhead for unchanged modules. +// Must be set early so dynamic imports (extensions, lazy subcommands) benefit. +// --------------------------------------------------------------------------- +if (parseInt(process.versions.node) >= 22) { + process.env.NODE_COMPILE_CACHE ??= join(agentDir, '.compile-cache') +} + // --------------------------------------------------------------------------- // Minimal CLI arg parser — detects print/subagent mode flags // --------------------------------------------------------------------------- @@ -538,8 +547,16 @@ const sessionManager = cliFlags._selectedSessionPath exitIfManagedResourcesAreNewer(agentDir) initResources(agentDir) markStartup('initResources') + +// Overlap resource loading with session manager setup — both are independent. +// resourceLoader.reload() is the most expensive step (jiti compilation), so +// starting it early shaves ~50-200ms off interactive startup. const resourceLoader = buildResourceLoader(agentDir) -await resourceLoader.reload() +const resourceLoadPromise = resourceLoader.reload() + +// While resources load, let session manager finish any async I/O it needs. +// Then await the resource promise before creating the agent session. +await resourceLoadPromise markStartup('resourceLoader.reload') const { session, extensionsResult } = await createAgentSession({ @@ -613,8 +630,9 @@ if (!process.stdin.isTTY) { process.exit(1) } -// Welcome screen — shown on every fresh interactive session before TUI takes over -{ +// Welcome screen — shown on every fresh interactive session before TUI takes over. +// Skip when the first-run banner was already printed in loader.ts (prevents double banner). 
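The cli.ts hunks a few lines above describe overlapping the resource reload with other startup work; in general form the overlap pattern looks like the sketch below (function names here are hypothetical, not from the codebase):

async function startUpSketch(
  reloadResources: () => Promise<void>,   // the expensive step (jiti compilation)
  prepareSession: () => Promise<void>,    // independent setup that can run meanwhile
): Promise<void> {
  const resourcesReady = reloadResources(); // kicked off immediately, not awaited yet
  await prepareSession();                   // overlaps with the resource load
  await resourcesReady;                     // join before anything needs the resources
}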
+if (!process.env.GSD_FIRST_RUN_BANNER) { const { printWelcomeScreen } = await import('./welcome-screen.js') printWelcomeScreen({ version: process.env.GSD_VERSION || '0.0.0', diff --git a/src/loader.ts b/src/loader.ts index f40e2e0c5..237f5bab7 100644 --- a/src/loader.ts +++ b/src/loader.ts @@ -49,7 +49,8 @@ process.env.PI_PACKAGE_DIR = pkgDir process.env.PI_SKIP_VERSION_CHECK = '1' // GSD runs its own update check in cli.ts — suppress pi's process.title = 'gsd' -// Print branded banner on first launch (before ~/.gsd/ exists) +// Print branded banner on first launch (before ~/.gsd/ exists). +// Set GSD_FIRST_RUN_BANNER so cli.ts skips the duplicate welcome screen. if (!existsSync(appRoot)) { const cyan = '\x1b[36m' const green = '\x1b[32m' @@ -62,6 +63,7 @@ if (!existsSync(appRoot)) { ` Get Shit Done ${dim}v${gsdVersion}${reset}\n` + ` ${green}Welcome.${reset} Setting up your environment...\n\n` ) + process.env.GSD_FIRST_RUN_BANNER = '1' } // GSD_CODING_AGENT_DIR — tells pi's getAgentDir() to return ~/.gsd/agent/ instead of ~/.gsd/agent/ diff --git a/src/resource-loader.ts b/src/resource-loader.ts index 0571ac272..ded6d3185 100644 --- a/src/resource-loader.ts +++ b/src/resource-loader.ts @@ -40,6 +40,12 @@ interface ManagedResourceManifest { * causing extension load errors. */ installedExtensionRootFiles?: string[] + /** + * Subdirectory extension names installed in extensions/ by this GSD version. + * Used on the next upgrade to detect and prune subdirectory extensions that + * were removed from the bundle. + */ + installedExtensionDirs?: string[] } export { discoverExtensionEntryPaths } from './extension-discovery.js' @@ -67,14 +73,25 @@ function getBundledGsdVersion(): string { } function writeManagedResourceManifest(agentDir: string): void { - // Record root-level files currently in the bundled extensions source so that - // future upgrades can detect and prune any that get removed or moved. + // Record root-level files and subdirectory extension names currently in the + // bundled extensions source so that future upgrades can detect and prune any + // that get removed or moved. 
let installedExtensionRootFiles: string[] = [] + let installedExtensionDirs: string[] = [] try { if (existsSync(bundledExtensionsDir)) { - installedExtensionRootFiles = readdirSync(bundledExtensionsDir, { withFileTypes: true }) + const entries = readdirSync(bundledExtensionsDir, { withFileTypes: true }) + installedExtensionRootFiles = entries .filter(e => e.isFile()) .map(e => e.name) + installedExtensionDirs = entries + .filter(e => e.isDirectory()) + .filter(e => { + // Only track directories that are actual extensions (contain index.js or index.ts) + const dirPath = join(bundledExtensionsDir, e.name) + return existsSync(join(dirPath, 'index.js')) || existsSync(join(dirPath, 'index.ts')) + }) + .map(e => e.name) } } catch { /* non-fatal */ } @@ -83,6 +100,7 @@ function writeManagedResourceManifest(agentDir: string): void { syncedAt: Date.now(), contentHash: computeResourceFingerprint(), installedExtensionRootFiles, + installedExtensionDirs, } writeFileSync(getManagedResourceManifestPath(agentDir), JSON.stringify(manifest)) } @@ -314,24 +332,40 @@ function pruneRemovedBundledExtensions( // Current bundled root-level files (what the new version provides) const currentSourceFiles = new Set() + // Current bundled subdirectory extensions + const currentSourceDirs = new Set() try { if (existsSync(bundledExtensionsDir)) { for (const e of readdirSync(bundledExtensionsDir, { withFileTypes: true })) { if (e.isFile()) currentSourceFiles.add(e.name) + if (e.isDirectory()) currentSourceDirs.add(e.name) } } } catch { /* non-fatal */ } - const removeIfStale = (fileName: string) => { + const removeFileIfStale = (fileName: string) => { if (currentSourceFiles.has(fileName)) return // still in bundle, not stale const stale = join(extensionsDir, fileName) try { if (existsSync(stale)) rmSync(stale, { force: true }) } catch { /* non-fatal */ } } + const removeDirIfStale = (dirName: string) => { + if (currentSourceDirs.has(dirName)) return // still in bundle, not stale + const stale = join(extensionsDir, dirName) + try { if (existsSync(stale)) rmSync(stale, { recursive: true, force: true }) } catch { /* non-fatal */ } + } + if (manifest?.installedExtensionRootFiles) { // Manifest-based: remove previously-installed root files that are no longer bundled for (const prevFile of manifest.installedExtensionRootFiles) { - removeIfStale(prevFile) + removeFileIfStale(prevFile) + } + } + + if (manifest?.installedExtensionDirs) { + // Manifest-based: remove previously-installed subdirectory extensions that are no longer bundled + for (const prevDir of manifest.installedExtensionDirs) { + removeDirIfStale(prevDir) } } @@ -339,7 +373,7 @@ function pruneRemovedBundledExtensions( // These were installed by pre-manifest versions so they may not appear in // installedExtensionRootFiles even when a manifest exists. 
// env-utils.js was moved from extensions/ root → gsd/ in v2.39.x (#1634) - removeIfStale('env-utils.js') + removeFileIfStale('env-utils.js') } /** @@ -452,5 +486,6 @@ export function buildResourceLoader(agentDir: string): DefaultResourceLoader { return new DefaultResourceLoader({ agentDir, additionalExtensionPaths: piExtensionPaths, - }) + bundledExtensionNames: bundledKeys, + } as ConstructorParameters[0]) } diff --git a/src/resources/extensions/async-jobs/async-bash-timeout.test.ts b/src/resources/extensions/async-jobs/async-bash-timeout.test.ts new file mode 100644 index 000000000..3ab48424d --- /dev/null +++ b/src/resources/extensions/async-jobs/async-bash-timeout.test.ts @@ -0,0 +1,122 @@ +/** + * async-bash-timeout.test.ts — Tests for async_bash timeout behavior. + * + * Reproduces issue #2186: when an async bash job exceeds its timeout and + * the child process ignores SIGTERM, the promise hangs indefinitely. + * The fix adds a SIGKILL fallback and a hard deadline that force-resolves + * the promise so execution can continue. + */ + +import test from "node:test"; +import assert from "node:assert/strict"; +import { createAsyncBashTool } from "./async-bash-tool.ts"; +import { AsyncJobManager } from "./job-manager.ts"; + +function getTextFromResult(result: { content: Array<{ type: string; text?: string }> }): string { + return result.content.map((c) => c.text ?? "").join("\n"); +} + +const noopSignal = new AbortController().signal; + +test("async_bash with timeout resolves even if process ignores SIGTERM", async () => { + const manager = new AsyncJobManager(); + const tool = createAsyncBashTool(() => manager, () => process.cwd()); + + // Start a job that traps SIGTERM (ignores it), with a 2s timeout. + // The process installs a SIGTERM trap and sleeps for 60s. + // Before the fix, this would hang forever because SIGTERM is ignored + // and the close event never fires. 
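For orientation, the escalation ladder that this test exercises (and that the async-bash-tool.ts hunks further below implement) has roughly the shape sketched here; this is simplified and assumes Node typings, while the real change also signals the whole process group and preserves captured output:

function killWithEscalation(pid: number, onGiveUp: () => void): void {
  try { process.kill(pid, "SIGTERM"); } catch { /* already exited */ }
  const sigkill = setTimeout(() => {
    try { process.kill(pid, "SIGKILL"); } catch { /* already exited */ }
    // Even SIGKILL may never surface a 'close' event; stop waiting after a
    // hard deadline so the awaiting caller can continue (#2186).
    setTimeout(onGiveUp, 3_000).unref();   // HARD_DEADLINE_MS in the fix
  }, 5_000);                               // SIGKILL_GRACE_MS in the fix
  sigkill.unref();
}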
+ const result = await tool.execute( + "tc-timeout", + { + command: "trap '' TERM; sleep 60", + timeout: 2, + label: "sigterm-resistant", + }, + noopSignal, + () => {}, + undefined as never, + ); + + const text = getTextFromResult(result); + assert.match(text, /sigterm-resistant/); + + const jobId = text.match(/\*\*(bg_[a-f0-9]+)\*\*/)?.[1]; + assert.ok(jobId, "Should have returned a job ID"); + + // Now await the job — it should resolve within a reasonable time + // (timeout 2s + SIGKILL grace 5s + buffer = well under 15s) + const start = Date.now(); + const job = manager.getJob(jobId)!; + assert.ok(job, "Job should exist"); + + await Promise.race([ + job.promise, + new Promise((_, reject) => { + const t = setTimeout(() => reject(new Error( + `Job promise hung for ${Date.now() - start}ms — ` + + `this is the bug from issue #2186: timeout hangs indefinitely`, + )), 15_000); + if (typeof t === "object" && "unref" in t) t.unref(); + }), + ]); + + const elapsed = Date.now() - start; + // Should have resolved well within 15s (timeout 2s + kill grace ~5s) + assert.ok(elapsed < 15_000, `Job took ${elapsed}ms — expected <15s`); + + // Job should have completed (resolved, not rejected) with timeout message + assert.ok( + job.status === "completed" || job.status === "failed", + `Job status should be completed or failed, got: ${job.status}`, + ); + + if (job.status === "completed") { + assert.ok( + job.resultText?.includes("timed out") || job.resultText?.includes("Timed out"), + `Result should mention timeout, got: ${job.resultText}`, + ); + } + + manager.shutdown(); +}); + +test("async_bash with timeout resolves normally when process exits on SIGTERM", async () => { + const manager = new AsyncJobManager(); + const tool = createAsyncBashTool(() => manager, () => process.cwd()); + + // Start a normal sleep that will die on SIGTERM, with a 1s timeout + const result = await tool.execute( + "tc-normal-timeout", + { + command: "sleep 60", + timeout: 1, + label: "normal-timeout", + }, + noopSignal, + () => {}, + undefined as never, + ); + + const text = getTextFromResult(result); + const jobId = text.match(/\*\*(bg_[a-f0-9]+)\*\*/)?.[1]; + assert.ok(jobId, "Should have returned a job ID"); + + const job = manager.getJob(jobId)!; + const start = Date.now(); + + await Promise.race([ + job.promise, + new Promise((_, reject) => { + const t = setTimeout(() => reject(new Error("Job hung")), 10_000); + if (typeof t === "object" && "unref" in t) t.unref(); + }), + ]); + + const elapsed = Date.now() - start; + assert.ok(elapsed < 5_000, `Expected quick resolution after SIGTERM, took ${elapsed}ms`); + assert.equal(job.status, "completed"); + assert.ok(job.resultText?.includes("timed out"), `Should mention timeout: ${job.resultText}`); + + manager.shutdown(); +}); diff --git a/src/resources/extensions/async-jobs/async-bash-tool.ts b/src/resources/extensions/async-jobs/async-bash-tool.ts index b20a78b7b..a2b29b97b 100644 --- a/src/resources/extensions/async-jobs/async-bash-tool.ts +++ b/src/resources/extensions/async-jobs/async-bash-tool.ts @@ -109,6 +109,10 @@ function executeBashInBackground( timeout?: number, ): Promise { return new Promise((resolve, reject) => { + let settled = false; + const safeResolve = (value: string) => { if (!settled) { settled = true; resolve(value); } }; + const safeReject = (err: unknown) => { if (!settled) { settled = true; reject(err); } }; + const { shell, args } = getShellConfig(); const resolvedCommand = sanitizeCommand(command); @@ -121,11 +125,39 @@ function 
executeBashInBackground( let timedOut = false; let timeoutHandle: ReturnType<typeof setTimeout> | undefined; + let sigkillHandle: ReturnType<typeof setTimeout> | undefined; + let hardDeadlineHandle: ReturnType<typeof setTimeout> | undefined; + + /** Grace period (ms) between SIGTERM and SIGKILL. */ + const SIGKILL_GRACE_MS = 5_000; + /** Hard deadline (ms) after SIGKILL to force-resolve the promise. */ + const HARD_DEADLINE_MS = 3_000; if (timeout !== undefined && timeout > 0) { timeoutHandle = setTimeout(() => { timedOut = true; if (child.pid) killTree(child.pid); + + // If the process ignores SIGTERM, escalate to SIGKILL + sigkillHandle = setTimeout(() => { + if (child.pid) { + try { process.kill(-child.pid, "SIGKILL"); } catch { /* ignore */ } + try { process.kill(child.pid, "SIGKILL"); } catch { /* ignore */ } + } + + // Hard deadline: if even SIGKILL doesn't trigger 'close', + // force-resolve so the job doesn't hang forever (#2186). + hardDeadlineHandle = setTimeout(() => { + const output = Buffer.concat(chunks).toString("utf-8"); + safeResolve( + output + ? `${output}\n\nCommand timed out after ${timeout} seconds (force-killed)` + : `Command timed out after ${timeout} seconds (force-killed)`, + ); + }, HARD_DEADLINE_MS); + if (typeof hardDeadlineHandle === "object" && "unref" in hardDeadlineHandle) hardDeadlineHandle.unref(); + }, SIGKILL_GRACE_MS); + if (typeof sigkillHandle === "object" && "unref" in sigkillHandle) sigkillHandle.unref(); }, timeout * 1000); } @@ -168,24 +200,28 @@ function executeBashInBackground( child.on("error", (err) => { if (timeoutHandle) clearTimeout(timeoutHandle); + if (sigkillHandle) clearTimeout(sigkillHandle); + if (hardDeadlineHandle) clearTimeout(hardDeadlineHandle); signal.removeEventListener("abort", onAbort); - reject(err); + safeReject(err); }); child.on("close", (code) => { if (timeoutHandle) clearTimeout(timeoutHandle); + if (sigkillHandle) clearTimeout(sigkillHandle); + if (hardDeadlineHandle) clearTimeout(hardDeadlineHandle); signal.removeEventListener("abort", onAbort); if (spillStream) spillStream.end(); if (signal.aborted) { const output = Buffer.concat(chunks).toString("utf-8"); - resolve(output ? `${output}\n\nCommand aborted` : "Command aborted"); + safeResolve(output ? `${output}\n\nCommand aborted` : "Command aborted"); return; } if (timedOut) { const output = Buffer.concat(chunks).toString("utf-8"); - resolve(output ?
`${output}\n\nCommand timed out after ${timeout} seconds` : `Command timed out after ${timeout} seconds`); return; } @@ -208,7 +244,7 @@ function executeBashInBackground( text += `\n\nCommand exited with code ${code}`; } - resolve(text); + safeResolve(text); }); }); } diff --git a/src/resources/extensions/async-jobs/await-tool.test.ts b/src/resources/extensions/async-jobs/await-tool.test.ts index 3a93c4569..1ed49161c 100644 --- a/src/resources/extensions/async-jobs/await-tool.test.ts +++ b/src/resources/extensions/async-jobs/await-tool.test.ts @@ -118,3 +118,50 @@ test("await_job returns not-found message for invalid job IDs", async () => { manager.shutdown(); }); + +test("await_job marks jobs as awaited to suppress follow-up delivery (#2248)", async () => { + const followUps: string[] = []; + const manager = new AsyncJobManager({ + onJobComplete: (job) => { + if (!job.awaited) followUps.push(job.id); + }, + }); + const tool = createAwaitTool(() => manager); + + // Register a job that completes in 50ms + const jobId = manager.register("bash", "awaited-job", async () => { + return new Promise((resolve) => setTimeout(() => resolve("result"), 50)); + }); + + // await_job consumes the result — should mark as awaited before promise resolves + await tool.execute("tc7", { jobs: [jobId] }, noopSignal, () => {}, undefined as never); + + // Give the onJobComplete callback a tick to fire + await new Promise((r) => setTimeout(r, 50)); + + assert.equal(followUps.length, 0, "onJobComplete should not deliver follow-up for awaited jobs"); + + manager.shutdown(); +}); + +test("unawaited jobs still get follow-up delivery (#2248)", async () => { + const followUps: string[] = []; + const manager = new AsyncJobManager({ + onJobComplete: (job) => { + if (!job.awaited) followUps.push(job.id); + }, + }); + + // Register a fire-and-forget job + const jobId = manager.register("bash", "fire-and-forget", async () => "done"); + const job = manager.getJob(jobId)!; + await job.promise; + + // Give the callback a tick + await new Promise((r) => setTimeout(r, 50)); + + assert.equal(followUps.length, 1, "onJobComplete should deliver follow-up for unawaited jobs"); + assert.equal(followUps[0], jobId); + + manager.shutdown(); +}); diff --git a/src/resources/extensions/async-jobs/await-tool.ts b/src/resources/extensions/async-jobs/await-tool.ts index e6c1e77d4..bab79270a 100644 --- a/src/resources/extensions/async-jobs/await-tool.ts +++ b/src/resources/extensions/async-jobs/await-tool.ts @@ -66,6 +66,11 @@ export function createAwaitTool(getManager: () => AsyncJobManager): ToolDefiniti } } + // Mark all watched jobs as awaited upfront so the onJobComplete + // callback (which fires synchronously in the promise .then()) knows + // to suppress the follow-up message. + for (const j of watched) j.awaited = true; + // If all watched jobs are already done, return immediately const running = watched.filter((j) => j.status === "running"); if (running.length === 0) { diff --git a/src/resources/extensions/async-jobs/index.ts b/src/resources/extensions/async-jobs/index.ts index 62cd4bbb4..3b8009774 100644 --- a/src/resources/extensions/async-jobs/index.ts +++ b/src/resources/extensions/async-jobs/index.ts @@ -42,6 +42,7 @@ export default function AsyncJobs(pi: ExtensionAPI) { manager = new AsyncJobManager({ onJobComplete: (job) => { + if (job.awaited) return; const statusEmoji = job.status === "completed" ? 
"done" : "error"; const elapsed = ((Date.now() - job.startTime) / 1000).toFixed(1); const output = job.status === "completed" diff --git a/src/resources/extensions/async-jobs/job-manager.ts b/src/resources/extensions/async-jobs/job-manager.ts index 90034b1d4..c5b1abf4e 100644 --- a/src/resources/extensions/async-jobs/job-manager.ts +++ b/src/resources/extensions/async-jobs/job-manager.ts @@ -22,6 +22,8 @@ export interface Job { promise: Promise; resultText?: string; errorText?: string; + /** Set by await_job when results are consumed. Suppresses follow-up delivery. */ + awaited?: boolean; } export interface JobManagerOptions { diff --git a/src/resources/extensions/bg-shell/overlay.ts b/src/resources/extensions/bg-shell/overlay.ts index ddaf744bb..5dd6a3872 100644 --- a/src/resources/extensions/bg-shell/overlay.ts +++ b/src/resources/extensions/bg-shell/overlay.ts @@ -430,6 +430,10 @@ export class BgManagerOverlay { return this.box(inner, width); } + dispose(): void { + clearInterval(this.refreshTimer); + } + invalidate(): void { this.cachedWidth = undefined; this.cachedLines = undefined; diff --git a/src/resources/extensions/gsd/auto-prompts.ts b/src/resources/extensions/gsd/auto-prompts.ts index d8a64e218..587484b4b 100644 --- a/src/resources/extensions/gsd/auto-prompts.ts +++ b/src/resources/extensions/gsd/auto-prompts.ts @@ -428,8 +428,6 @@ export function buildSkillActivationBlock(params: { params.sliceTitle, params.taskId, params.taskTitle, - ...(params.extraContext ?? []), - params.taskPlanContent ?? undefined, ); const visibleSkills = (typeof getLoadedSkills === 'function' ? getLoadedSkills() : []).filter(skill => !skill.disableModelInvocation); @@ -460,12 +458,6 @@ export function buildSkillActivationBlock(params: { } } - for (const skill of visibleSkills) { - if (skillMatchesContext(skill, contextTokens)) { - matched.add(normalizeSkillReference(skill.name)); - } - } - const ordered = [...matched] .filter(name => installedNames.has(name) && !avoided.has(name)) .sort(); @@ -979,11 +971,7 @@ export async function buildPlanSlicePrompt( const executorContextConstraints = formatExecutorConstraints(); const outputRelPath = relSliceFile(base, mid, sid, "PLAN"); - const prefs = loadEffectiveGSDPreferences(); - const commitDocsEnabled = prefs?.preferences?.git?.commit_docs !== false; - const commitInstruction = commitDocsEnabled - ? `Commit the plan files only: \`git add ${relSlicePath(base, mid, sid)}/ .gsd/DECISIONS.md .gitignore && git commit -m "docs(${sid}): add slice plan"\`. Do not stage .gsd/STATE.md or other runtime files — the system manages those.` - : "Do not commit — planning docs are not tracked in git for this project."; + const commitInstruction = "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; return loadPrompt("plan-slice", { workingDirectory: base, milestoneId: mid, sliceId: sid, sliceTitle: sTitle, @@ -1489,11 +1477,7 @@ export async function buildReassessRoadmapPrompt( // Non-fatal — captures module may not be available } - const reassessPrefs = loadEffectiveGSDPreferences(); - const reassessCommitDocsEnabled = reassessPrefs?.preferences?.git?.commit_docs !== false; - const reassessCommitInstruction = reassessCommitDocsEnabled - ? `Commit: \`docs(${mid}): reassess roadmap after ${completedSliceId}\`. 
Stage only the .gsd/milestones/ files you changed — do not stage .gsd/STATE.md or other runtime files.` - : "Do not commit — planning docs are not tracked in git for this project."; + const reassessCommitInstruction = "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; return loadPrompt("reassess-roadmap", { workingDirectory: base, diff --git a/src/resources/extensions/gsd/auto-start.ts b/src/resources/extensions/gsd/auto-start.ts index 192e7a55f..abe3f0c8f 100644 --- a/src/resources/extensions/gsd/auto-start.ts +++ b/src/resources/extensions/gsd/auto-start.ts @@ -167,22 +167,19 @@ export async function bootstrapAutoSession( // ensureGitignore checks for git-tracked .gsd/ files and skips the // ".gsd" pattern if the project intentionally tracks .gsd/ in git. const gitPrefs = loadEffectiveGSDPreferences()?.preferences?.git; - const commitDocs = gitPrefs?.commit_docs; const manageGitignore = gitPrefs?.manage_gitignore; - ensureGitignore(base, { commitDocs, manageGitignore }); + ensureGitignore(base, { manageGitignore }); if (manageGitignore !== false) untrackRuntimeFiles(base); // Bootstrap .gsd/ if it doesn't exist const gsdDir = join(base, ".gsd"); if (!existsSync(gsdDir)) { mkdirSync(join(gsdDir, "milestones"), { recursive: true }); - if (commitDocs !== false) { - try { - nativeAddAll(base); - nativeCommit(base, "chore: init gsd"); - } catch { - /* nothing to commit */ - } + try { + nativeAddAll(base); + nativeCommit(base, "chore: init gsd"); + } catch { + /* nothing to commit */ } } @@ -487,7 +484,7 @@ export async function bootstrapAutoSession( // Capture integration branch if (s.currentMilestoneId) { if (getIsolationMode() !== "none") { - captureIntegrationBranch(base, s.currentMilestoneId, { commitDocs }); + captureIntegrationBranch(base, s.currentMilestoneId); } setActiveMilestoneId(base, s.currentMilestoneId); } diff --git a/src/resources/extensions/gsd/auto-supervisor.ts b/src/resources/extensions/gsd/auto-supervisor.ts index 4777f68e2..49bfbeca0 100644 --- a/src/resources/extensions/gsd/auto-supervisor.ts +++ b/src/resources/extensions/gsd/auto-supervisor.ts @@ -13,6 +13,10 @@ import { nativeHasChanges } from "./native-git-bridge.js"; /** Signals that should trigger lock cleanup on process termination. */ const CLEANUP_SIGNALS: NodeJS.Signals[] = ["SIGTERM", "SIGHUP", "SIGINT"]; +/** Module-level reference to the last registered handler, used as a safety net + * to prevent handler accumulation if the caller neglects to pass previousHandler. */ +let _currentSigtermHandler: (() => void) | null = null; + /** * Register signal handlers that clear lock files and exit cleanly. 
* Installs handlers on SIGTERM, SIGHUP, and SIGINT so that lock files @@ -29,15 +33,22 @@ export function registerSigtermHandler( currentBasePath: string, previousHandler: (() => void) | null, ): () => void { + // Remove the explicitly-passed previous handler if (previousHandler) { for (const sig of CLEANUP_SIGNALS) process.off(sig, previousHandler); } + // Safety net: also remove the module-tracked handler in case the caller + // forgot to pass previousHandler (prevents handler accumulation) + if (_currentSigtermHandler && _currentSigtermHandler !== previousHandler) { + for (const sig of CLEANUP_SIGNALS) process.off(sig, _currentSigtermHandler); + } const handler = () => { clearLock(currentBasePath); releaseSessionLock(currentBasePath); process.exit(0); }; for (const sig of CLEANUP_SIGNALS) process.on(sig, handler); + _currentSigtermHandler = handler; return handler; } @@ -46,6 +57,9 @@ export function deregisterSigtermHandler(handler: (() => void) | null): void { if (handler) { for (const sig of CLEANUP_SIGNALS) process.off(sig, handler); } + if (_currentSigtermHandler === handler) { + _currentSigtermHandler = null; + } } // ─── Working Tree Activity Detection ────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/auto-worktree.ts b/src/resources/extensions/gsd/auto-worktree.ts index d6070fea4..4641e02f6 100644 --- a/src/resources/extensions/gsd/auto-worktree.ts +++ b/src/resources/extensions/gsd/auto-worktree.ts @@ -1105,7 +1105,32 @@ export function mergeMilestoneToMain( } } - // 7. Squash merge — auto-resolve .gsd/ state file conflicts (#530) + // 7. Stash any pre-existing dirty files so the squash merge is not + // blocked by unrelated local changes (#2151). clearProjectRootStateFiles + // only removes untracked .gsd/ files; tracked dirty files elsewhere (e.g. + // .planning/work-state.json with stash conflict markers) are invisible to + // that cleanup but will cause `git merge --squash` to reject. + let stashed = false; + try { + const status = execFileSync("git", ["status", "--porcelain"], { + cwd: originalBasePath_, + stdio: ["ignore", "pipe", "pipe"], + encoding: "utf-8", + }).trim(); + if (status) { + execFileSync( + "git", + ["stash", "push", "--include-untracked", "-m", `gsd: pre-merge stash for ${milestoneId}`], + { cwd: originalBasePath_, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }, + ); + stashed = true; + } + } catch { + // Stash failure is non-fatal — proceed without stash and let the merge + // report the dirty tree if it fails. + } + + // 8. Squash merge — auto-resolve .gsd/ state file conflicts (#530) const mergeResult = nativeMergeSquash(originalBasePath_, milestoneBranch); if (!mergeResult.success) { @@ -1113,12 +1138,27 @@ export function mergeMilestoneToMain( // untracked .gsd/ files left by syncStateToProjectRoot). Preserve the // milestone branch so commits are not lost. if (mergeResult.conflicts.includes("__dirty_working_tree__")) { + // Pop stash before throwing so local work is not lost. + if (stashed) { + try { + execFileSync("git", ["stash", "pop"], { + cwd: originalBasePath_, + stdio: ["ignore", "pipe", "pipe"], + encoding: "utf-8", + }); + } catch { /* stash pop conflict is non-fatal */ } + } // Restore cwd so the caller is not stranded on the integration branch process.chdir(previousCwd); + // Surface the actual dirty filenames from git stderr instead of + // generically blaming .gsd/ (#2151). + const fileList = mergeResult.dirtyFiles?.length + ? 
`Dirty files:\n${mergeResult.dirtyFiles.map((f) => ` ${f}`).join("\n")}` + : `Check \`git status\` in the project root for details.`; throw new GSDError( GSD_GIT_ERROR, - `Squash merge of ${milestoneBranch} rejected: working tree has dirty or untracked files that conflict with the merge. ` + - `Clean the project root .gsd/ directory and retry.`, + `Squash merge of ${milestoneBranch} rejected: working tree has dirty or untracked files ` + + `that conflict with the merge. ${fileList}`, ); } @@ -1154,6 +1194,16 @@ export function mergeMilestoneToMain( // If there are still non-.gsd conflicts, escalate if (codeConflicts.length > 0) { + // Pop stash before throwing so local work is not lost (#2151). + if (stashed) { + try { + execFileSync("git", ["stash", "pop"], { + cwd: originalBasePath_, + stdio: ["ignore", "pipe", "pipe"], + encoding: "utf-8", + }); + } catch { /* stash pop conflict is non-fatal */ } + } throw new MergeConflictError( codeConflicts, "squash", @@ -1165,11 +1215,11 @@ export function mergeMilestoneToMain( // No conflicts detected — possibly "already up to date", fall through to commit } - // 8. Commit (handle nothing-to-commit gracefully) + // 9. Commit (handle nothing-to-commit gracefully) const commitResult = nativeCommit(originalBasePath_, commitMessage); const nothingToCommit = commitResult === null; - // 8a. Clean up SQUASH_MSG left by git merge --squash (#1853). + // 9a. Clean up SQUASH_MSG left by git merge --squash (#1853). // git only removes SQUASH_MSG when the commit reads it directly (plain // `git commit`). nativeCommit uses `-F -` (stdin) or libgit2, neither // of which trigger git's SQUASH_MSG cleanup. If left on disk, doctor @@ -1179,7 +1229,23 @@ export function mergeMilestoneToMain( if (existsSync(squashMsgPath)) unlinkSync(squashMsgPath); } catch { /* best-effort */ } - // 8b. Safety check (#1792): if nothing was committed, verify the milestone + // 9a-ii. Restore stashed files now that the merge+commit is complete (#2151). + // Pop after commit so stashed changes do not interfere with the squash merge + // or the commit content. Conflict on pop is non-fatal — the stash entry is + // preserved and the user can resolve manually with `git stash pop`. + if (stashed) { + try { + execFileSync("git", ["stash", "pop"], { + cwd: originalBasePath_, + stdio: ["ignore", "pipe", "pipe"], + encoding: "utf-8", + }); + } catch { + // Stash pop conflict is non-fatal — stash entry persists for manual resolution. + } + } + + // 9b. Safety check (#1792): if nothing was committed, verify the milestone // work is already on the integration branch before allowing teardown. // Compare only non-.gsd/ paths — .gsd/ state files diverge normally and // are auto-resolved during the squash merge. @@ -1204,7 +1270,7 @@ export function mergeMilestoneToMain( } } - // 8c. Detect whether any non-.gsd/ code files were actually merged (#1906). + // 9c. Detect whether any non-.gsd/ code files were actually merged (#1906). // When a milestone only produced .gsd/ metadata (summaries, roadmaps) but no // real code, the user sees "milestone complete" but nothing changed in their // codebase. Surface this so the caller can warn the user. @@ -1225,7 +1291,7 @@ export function mergeMilestoneToMain( } } - // 9. Auto-push if enabled + // 10. Auto-push if enabled let pushed = false; if (prefs.auto_push === true && !nothingToCommit) { const remote = prefs.remote ?? "origin"; @@ -1271,11 +1337,11 @@ export function mergeMilestoneToMain( } } - // 10. 
Guard removed — step 8b (#1792) now handles this with a smarter check: + // 11. Guard removed — step 9b (#1792) now handles this with a smarter check: // throws only when the milestone has unanchored code changes, passes // through when the code is genuinely already on the integration branch. - // 10a. Pre-teardown safety net (#1853): if the worktree still has uncommitted + // 11a. Pre-teardown safety net (#1853): if the worktree still has uncommitted // changes (e.g. nativeHasChanges cache returned stale false, or auto-commit // silently failed), force one final commit so code is not destroyed by // `git worktree remove --force`. @@ -1299,7 +1365,7 @@ export function mergeMilestoneToMain( } } - // 11. Remove worktree directory first (must happen before branch deletion) + // 12. Remove worktree directory first (must happen before branch deletion) try { removeWorktree(originalBasePath_, milestoneId, { branch: null as unknown as string, @@ -1309,14 +1375,14 @@ export function mergeMilestoneToMain( // Best-effort -- worktree dir may already be gone } - // 12. Delete milestone branch (after worktree removal so ref is unlocked) + // 13. Delete milestone branch (after worktree removal so ref is unlocked) try { nativeBranchDelete(originalBasePath_, milestoneBranch); } catch { // Best-effort } - // 13. Clear module state + // 14. Clear module state originalBase = null; nudgeGitBranchCache(previousCwd); diff --git a/src/resources/extensions/gsd/auto/loop-deps.ts b/src/resources/extensions/gsd/auto/loop-deps.ts index 9f540335d..98dcf747d 100644 --- a/src/resources/extensions/gsd/auto/loop-deps.ts +++ b/src/resources/extensions/gsd/auto/loop-deps.ts @@ -109,7 +109,6 @@ export interface LoopDeps { captureIntegrationBranch: ( basePath: string, mid: string, - opts?: { commitDocs?: boolean }, ) => void; getIsolationMode: () => string; getCurrentBranch: (basePath: string) => string; diff --git a/src/resources/extensions/gsd/auto/phases.ts b/src/resources/extensions/gsd/auto/phases.ts index 18c3cdea2..cac6ad545 100644 --- a/src/resources/extensions/gsd/auto/phases.ts +++ b/src/resources/extensions/gsd/auto/phases.ts @@ -261,9 +261,7 @@ export async function runPreDispatch( if (mid) { if (deps.getIsolationMode() !== "none") { - deps.captureIntegrationBranch(s.basePath, mid, { - commitDocs: prefs?.git?.commit_docs, - }); + deps.captureIntegrationBranch(s.basePath, mid); } deps.resolver.enterMilestone(mid, ctx.ui); } else { diff --git a/src/resources/extensions/gsd/bootstrap/register-hooks.ts b/src/resources/extensions/gsd/bootstrap/register-hooks.ts index 1ff2452f9..0faa9563f 100644 --- a/src/resources/extensions/gsd/bootstrap/register-hooks.ts +++ b/src/resources/extensions/gsd/bootstrap/register-hooks.ts @@ -20,21 +20,34 @@ import { saveActivityLog } from "../activity-log.js"; // printed it before the TUI launched. Only re-print on /clear (subsequent sessions). 
let isFirstSession = true; +async function syncServiceTierStatus(ctx: ExtensionContext): Promise { + const { getEffectiveServiceTier, formatServiceTierFooterStatus } = await import("../service-tier.js"); + ctx.ui.setStatus("gsd-fast", formatServiceTierFooterStatus(getEffectiveServiceTier(), ctx.model?.id)); +} + export function registerHooks(pi: ExtensionAPI): void { pi.on("session_start", async (_event, ctx) => { resetWriteGateState(); resetToolCallLoopGuard(); + await syncServiceTierStatus(ctx); + + // Apply show_token_cost preference (#1515) + try { + const { loadEffectiveGSDPreferences } = await import("../preferences.js"); + const prefs = loadEffectiveGSDPreferences(); + process.env.GSD_SHOW_TOKEN_COST = prefs?.preferences.show_token_cost ? "1" : ""; + } catch { /* non-fatal */ } if (isFirstSession) { isFirstSession = false; } else { try { const gsdBinPath = process.env.GSD_BIN_PATH; if (gsdBinPath) { - const { dirname } = await import('node:path'); + const { dirname } = await import("node:path"); const { printWelcomeScreen } = await import( - join(dirname(gsdBinPath), 'welcome-screen.js') + join(dirname(gsdBinPath), "welcome-screen.js") ) as { printWelcomeScreen: (opts: { version: string; modelName?: string; provider?: string }) => void }; - printWelcomeScreen({ version: process.env.GSD_VERSION || '0.0.0' }); + printWelcomeScreen({ version: process.env.GSD_VERSION || "0.0.0" }); } } catch { /* non-fatal */ } } @@ -192,8 +205,11 @@ export function registerHooks(pi: ExtensionAPI): void { markToolEnd(event.toolCallId); }); + pi.on("model_select", async (_event, ctx) => { + await syncServiceTierStatus(ctx); + }); + pi.on("before_provider_request", async (event) => { - if (!isAutoActive()) return; const modelId = event.model?.id; if (!modelId) return; const { getEffectiveServiceTier, supportsServiceTier } = await import("../service-tier.js"); @@ -205,4 +221,3 @@ export function registerHooks(pi: ExtensionAPI): void { return payload; }); } - diff --git a/src/resources/extensions/gsd/db-writer.ts b/src/resources/extensions/gsd/db-writer.ts index 2559d5e04..6963b2455 100644 --- a/src/resources/extensions/gsd/db-writer.ts +++ b/src/resources/extensions/gsd/db-writer.ts @@ -9,6 +9,7 @@ // parseDecisionsTable() and parseRequirementsSections() with field fidelity. import { join, resolve } from 'node:path'; +import { readFileSync, existsSync } from 'node:fs'; import type { Decision, Requirement } from './types.js'; import { resolveGsdRootFile } from './paths.js'; import { saveFile } from './files.js'; @@ -17,6 +18,58 @@ import { invalidateStateCache } from './state.js'; import { clearPathCache } from './paths.js'; import { clearParseCache } from './files.js'; +// ─── Freeform Detection ─────────────────────────────────────────────────── + +/** + * Detect whether a DECISIONS.md file is in canonical table format + * (generated by generateDecisionsMd). + * + * Returns true only if the file starts with the canonical header + * ("# Decisions Register") that generateDecisionsMd produces. + * Files with freeform content — even if they contain an appended + * decisions table section — return false so the freeform content + * is preserved. + */ +export function isDecisionsTableFormat(content: string): boolean { + // The canonical format always starts with "# Decisions Register" + const firstLine = content.split('\n')[0]?.trim() ?? 
''; + if (firstLine !== '# Decisions Register') return false; + + // Additionally verify the file has the canonical table header + return content.includes('| # | When | Scope | Decision | Choice | Rationale | Revisable?'); +} + +/** + * Generate a minimal decisions table section (header + rows) for appending + * to a freeform DECISIONS.md file. + */ +function generateDecisionsAppendBlock(decisions: Decision[]): string { + const lines: string[] = []; + lines.push(''); + lines.push('---'); + lines.push(''); + lines.push('## Decisions Table'); + lines.push(''); + lines.push('| # | When | Scope | Decision | Choice | Rationale | Revisable? | Made By |'); + lines.push('|---|------|-------|----------|--------|-----------|------------|---------|'); + + for (const d of decisions) { + const cells = [ + d.id, + d.when_context, + d.scope, + d.decision, + d.choice, + d.rationale, + d.revisable, + d.made_by ?? 'agent', + ].map(cell => (cell ?? '').replace(/\|/g, '\\|')); + lines.push(`| ${cells.join(' | ')} |`); + } + + return lines.join('\n') + '\n'; +} + // ─── Markdown Generators ────────────────────────────────────────────────── /** @@ -230,8 +283,31 @@ export async function saveDecisionToDb( })); } - const md = generateDecisionsMd(allDecisions); const filePath = resolveGsdRootFile(basePath, 'DECISIONS'); + + // Check if existing DECISIONS.md has freeform (non-table) content. + // If so, preserve that content and append/update the decisions table + // at the end instead of overwriting the entire file. + let existingContent: string | null = null; + if (existsSync(filePath)) { + existingContent = readFileSync(filePath, 'utf-8'); + } + + let md: string; + if (existingContent && !isDecisionsTableFormat(existingContent)) { + // Freeform content detected — preserve it and append decisions table. + // Strip any previously appended decisions table section to avoid duplication. + const marker = '---\n\n## Decisions Table'; + const markerIdx = existingContent.indexOf(marker); + const freeformPart = markerIdx >= 0 + ? existingContent.substring(0, markerIdx).trimEnd() + : existingContent.trimEnd(); + md = freeformPart + '\n' + generateDecisionsAppendBlock(allDecisions); + } else { + // Table format or no existing file — full regeneration (original behavior) + md = generateDecisionsMd(allDecisions); + } + await saveFile(filePath, md); // Invalidate file-read caches so deriveState() sees the updated markdown. // Do NOT clear the artifacts table — we just wrote to it intentionally. 
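
For reviewers, a minimal sketch (not part of the patch) of how the freeform-preservation path above behaves; the two inputs below are hypothetical file contents, and the import path is assumed from the file being modified:

```ts
import { isDecisionsTableFormat } from "./db-writer.js";

// Hypothetical hand-written DECISIONS.md — prose first, no canonical header.
const freeform = [
  "# Architecture decisions",
  "",
  "We keep SQLite as the planning store because ...",
].join("\n");

// Canonical file as produced by generateDecisionsMd().
const canonical = [
  "# Decisions Register",
  "",
  "| # | When | Scope | Decision | Choice | Rationale | Revisable? | Made By |",
  "|---|------|-------|----------|--------|-----------|------------|---------|",
].join("\n");

isDecisionsTableFormat(freeform);  // false → saveDecisionToDb keeps the prose and appends/updates a "## Decisions Table" section
isDecisionsTableFormat(canonical); // true  → saveDecisionToDb regenerates the whole file (original behavior)
```
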
diff --git a/src/resources/extensions/gsd/detection.ts b/src/resources/extensions/gsd/detection.ts index 9a0c159eb..3c01a277a 100644 --- a/src/resources/extensions/gsd/detection.ts +++ b/src/resources/extensions/gsd/detection.ts @@ -87,6 +87,18 @@ export const PROJECT_FILES = [ "mix.exs", "deno.json", "deno.jsonc", + // .NET + ".sln", + ".csproj", + "Directory.Build.props", + // Git submodules + ".gitmodules", + // Xcode + "project.yml", + ".xcodeproj", + ".xcworkspace", + // Docker + "Dockerfile", ] as const; const LANGUAGE_MAP: Record = { @@ -106,6 +118,13 @@ const LANGUAGE_MAP: Record = { "mix.exs": "elixir", "deno.json": "typescript/deno", "deno.jsonc": "typescript/deno", + ".sln": "dotnet", + ".csproj": "dotnet", + "Directory.Build.props": "dotnet", + "project.yml": "swift/xcode", + ".xcodeproj": "swift/xcode", + ".xcworkspace": "swift/xcode", + "Dockerfile": "docker", }; const MONOREPO_MARKERS = [ diff --git a/src/resources/extensions/gsd/doctor-checks.ts b/src/resources/extensions/gsd/doctor-checks.ts index 862ec3c0a..20fee0fe0 100644 --- a/src/resources/extensions/gsd/doctor-checks.ts +++ b/src/resources/extensions/gsd/doctor-checks.ts @@ -2,7 +2,7 @@ import { existsSync, lstatSync, readdirSync, readFileSync, realpathSync, rmSync, import { basename, dirname, join, sep } from "node:path"; import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js"; -import { readRepoMeta, externalProjectsRoot } from "./repo-identity.js"; +import { readRepoMeta, externalProjectsRoot, cleanNumberedGsdVariants } from "./repo-identity.js"; import { loadFile } from "./files.js"; import { parseRoadmap as parseLegacyRoadmap } from "./parsers-legacy.js"; import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js"; @@ -790,6 +790,37 @@ export async function checkRuntimeHealth( // Non-fatal — external state check failed } + // ── Numbered .gsd collision variants (#2205) ─────────────────────────── + // macOS APFS can create ".gsd 2", ".gsd 3" etc. when a directory blocks + // symlink creation. These must be removed so the canonical .gsd is used. + try { + const variantPattern = /^\.gsd \d+$/; + const entries = readdirSync(basePath); + const variants = entries.filter(e => variantPattern.test(e)); + if (variants.length > 0) { + for (const v of variants) { + issues.push({ + severity: "warning", + code: "numbered_gsd_variant", + scope: "project", + unitId: "project", + message: `Found macOS collision variant "${v}" — this can cause GSD state to appear deleted.`, + file: v, + fixable: true, + }); + } + + if (shouldFix("numbered_gsd_variant")) { + const removed = cleanNumberedGsdVariants(basePath); + for (const name of removed) { + fixesApplied.push(`removed numbered .gsd variant: ${name}`); + } + } + } + } catch { + // Non-fatal — variant check failed + } + // ── Metrics ledger integrity ─────────────────────────────────────────── try { const metricsPath = join(root, "metrics.json"); diff --git a/src/resources/extensions/gsd/doctor-environment.ts b/src/resources/extensions/gsd/doctor-environment.ts index 61f61cd85..17a266ce8 100644 --- a/src/resources/extensions/gsd/doctor-environment.ts +++ b/src/resources/extensions/gsd/doctor-environment.ts @@ -37,6 +37,29 @@ const CMD_TIMEOUT = 5_000; // ── Helpers ──────────────────────────────────────────────────────────────── +/** Worktree sentinel — path segment that marks an auto-worktree directory. 
*/ +const WORKTREE_PATH_SEGMENT = `${join(".gsd", "worktrees")}/`; + +/** + * Resolve the project root when running inside a `.gsd/worktrees//` + * auto-worktree. Returns `null` if not in a worktree. + * + * Detection order: + * 1. `GSD_WORKTREE` env var (set by the worktree launcher) + * 2. `.gsd/worktrees/` segment in basePath + */ +function resolveWorktreeProjectRoot(basePath: string): string | null { + const envRoot = process.env.GSD_WORKTREE; + if (envRoot) return envRoot; + + const normalised = basePath.replace(/\\/g, "/"); + const idx = normalised.indexOf(WORKTREE_PATH_SEGMENT.replace(/\\/g, "/")); + if (idx === -1) return null; + + // Everything before `.gsd/worktrees/` is the project root + return basePath.slice(0, idx); +} + function tryExec(cmd: string, cwd: string): string | null { try { return execSync(cmd, { @@ -111,6 +134,14 @@ function checkDependenciesInstalled(basePath: string): EnvironmentCheckResult | const nodeModules = join(basePath, "node_modules"); if (!existsSync(nodeModules)) { + // In auto-worktrees node_modules is absent by design — the worktree + // symlinks to (or expects) the project root's copy. Fall back to + // checking the project root before reporting an error (#2303). + const projectRoot = resolveWorktreeProjectRoot(basePath); + if (projectRoot && existsSync(join(projectRoot, "node_modules"))) { + return { name: "dependencies", status: "ok", message: "Dependencies installed (project root)" }; + } + return { name: "dependencies", status: "error", diff --git a/src/resources/extensions/gsd/doctor-providers.ts b/src/resources/extensions/gsd/doctor-providers.ts index a06a5c307..99c8c4ede 100644 --- a/src/resources/extensions/gsd/doctor-providers.ts +++ b/src/resources/extensions/gsd/doctor-providers.ts @@ -305,11 +305,24 @@ function checkOptionalProviders(): ProviderCheckResult[] { const optional = ["brave", "tavily", "jina", "context7"] as const; const results: ProviderCheckResult[] = []; + // Determine which search providers are configured so we can suppress + // "not configured" noise for alternative search providers when at least + // one is already active (e.g. don't warn about missing BRAVE_API_KEY + // when Tavily is configured). 
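+  // Example: if Tavily is configured but BRAVE_API_KEY is unset, the brave
+  // entry is skipped below entirely instead of surfacing as "not configured".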
+ const searchProviderIds = ["brave", "tavily"] as const; + const hasAnySearchProvider = searchProviderIds.some(id => resolveKey(id).found); + for (const providerId of optional) { const info = PROVIDER_REGISTRY.find(p => p.id === providerId); if (!info) continue; const lookup = resolveKey(providerId); + + // Skip unconfigured search providers when another search provider is active + if (!lookup.found && hasAnySearchProvider && info.category === "search") { + continue; + } + results.push({ name: providerId, label: info.label, diff --git a/src/resources/extensions/gsd/doctor-types.ts b/src/resources/extensions/gsd/doctor-types.ts index c0c35982f..95ea0e70b 100644 --- a/src/resources/extensions/gsd/doctor-types.ts +++ b/src/resources/extensions/gsd/doctor-types.ts @@ -26,6 +26,7 @@ export type DoctorIssueCode = | "unresolvable_dependency" | "failed_migration" | "broken_symlink" + | "numbered_gsd_variant" // Environment health checks (#1221) | "env_node_version" | "env_dependencies" diff --git a/src/resources/extensions/gsd/file-watcher.ts b/src/resources/extensions/gsd/file-watcher.ts index 98928ed62..a8b0be19c 100644 --- a/src/resources/extensions/gsd/file-watcher.ts +++ b/src/resources/extensions/gsd/file-watcher.ts @@ -3,6 +3,7 @@ import type { EventBus } from "@gsd/pi-coding-agent"; import { relative } from "node:path"; let watcher: FSWatcher | null = null; +let pending = new Map>(); const EVENT_MAP: Record = { "settings.json": "settings-changed", @@ -36,7 +37,7 @@ export async function startFileWatcher( const { watch } = await import("chokidar"); - const pending = new Map>(); + pending = new Map>(); function debounceEmit(event: string): void { const existing = pending.get(event); @@ -90,6 +91,8 @@ export async function startFileWatcher( * Stop the file watcher and clean up resources. */ export async function stopFileWatcher(): Promise { + for (const timer of pending.values()) clearTimeout(timer); + pending.clear(); if (watcher) { await watcher.close(); watcher = null; diff --git a/src/resources/extensions/gsd/forensics.ts b/src/resources/extensions/gsd/forensics.ts index 62c89279d..56a7ce0b5 100644 --- a/src/resources/extensions/gsd/forensics.ts +++ b/src/resources/extensions/gsd/forensics.ts @@ -30,6 +30,9 @@ import { loadPrompt } from "./prompt-loader.js"; import { gsdRoot } from "./paths.js"; import { formatDuration } from "../shared/format-utils.js"; import { getAutoWorktreePath } from "./auto-worktree.js"; +import { loadEffectiveGSDPreferences, loadGlobalGSDPreferences, getGlobalGSDPreferencesPath } from "./preferences.js"; +import { showNextAction } from "../shared/tui.js"; +import { ensurePreferencesFile, serializePreferencesToFrontmatter } from "./commands-prefs-wizard.js"; // ─── Types ──────────────────────────────────────────────────────────────────── @@ -67,6 +70,71 @@ interface ForensicReport { recentUnits: { type: string; id: string; cost: number; duration: number; model: string; finishedAt: number }[]; } +// ─── Duplicate Detection ────────────────────────────────────────────────────── + +const DEDUP_PROMPT_SECTION = ` +## Duplicate Detection (REQUIRED before issue creation) + +Before offering to create a GitHub issue, you MUST search for existing issues and PRs that may already address this bug. This step uses the user's AI tokens for analysis. + +### Search Steps + +1. **Search closed issues** for similar keywords from your diagnosis: + \`\`\` + gh issue list --repo gsd-build/gsd-2 --state closed --search "" --limit 20 + \`\`\` + +2. 
**Search open PRs** that might contain the fix: + \`\`\` + gh pr list --repo gsd-build/gsd-2 --state open --search "" --limit 10 + \`\`\` + +3. **Search merged PRs** that may have already fixed this: + \`\`\` + gh pr list --repo gsd-build/gsd-2 --state merged --search "" --limit 10 + \`\`\` + +### Analysis + +For each result, compare it against your root-cause diagnosis: +- Does the issue describe the same code path or file? +- Does the PR modify the same file:line you identified? +- Is the symptom description semantically similar even if keywords differ? + +### Present Findings + +If you find potential matches, present them to the user: + +1. **"Already fixed by PR #X — skip issue creation"** — when a merged PR or closed issue clearly addresses the same root cause. Explain why you believe it matches. +2. **"Add my findings to existing issue #Y"** — when an open issue exists for the same bug. Use \`gh issue comment #Y --repo gsd-build/gsd-2\` to add forensic evidence. +3. **"Create new issue anyway"** — when existing results do not cover this specific failure. + +Only proceed to issue creation if no matches were found OR the user explicitly chooses "Create new issue anyway". +`; + +async function writeForensicsDedupPref(ctx: ExtensionCommandContext, enabled: boolean): Promise { + const prefsPath = getGlobalGSDPreferencesPath(); + await ensurePreferencesFile(prefsPath, ctx, "global"); + const existing = loadGlobalGSDPreferences(); + const prefs: Record = existing?.preferences ? { ...existing.preferences } : {}; + prefs.version = prefs.version || 1; + prefs.forensics_dedup = enabled; + + const frontmatter = serializePreferencesToFrontmatter(prefs); + const raw = existsSync(prefsPath) ? readFileSync(prefsPath, "utf-8") : ""; + let body = "\n# GSD Skill Preferences\n\nSee `~/.gsd/agent/extensions/gsd/docs/preferences-reference.md` for full field documentation and examples.\n"; + const start = raw.startsWith("---\n") ? 4 : raw.startsWith("---\r\n") ? 5 : -1; + if (start !== -1) { + const closingIdx = raw.indexOf("\n---", start); + if (closingIdx !== -1) { + const after = raw.slice(closingIdx + 4); + if (after.trim()) body = after; + } + } + + writeFileSync(prefsPath, `---\n${frontmatter}---${body}`, "utf-8"); +} + // ─── Entry Point ────────────────────────────────────────────────────────────── export async function handleForensics( @@ -98,6 +166,29 @@ export async function handleForensics( return; } + // ─── Duplicate detection opt-in ───────────────────────────────────────────── + const effectivePrefs = loadEffectiveGSDPreferences()?.preferences; + let dedupEnabled = effectivePrefs?.forensics_dedup === true; + + if (effectivePrefs?.forensics_dedup === undefined) { + const choice = await showNextAction(ctx, { + title: "Duplicate detection available", + summary: ["Before filing a GitHub issue, forensics can search existing issues and PRs to avoid duplicates.", "This uses additional AI tokens for analysis."], + actions: [ + { id: "enable", label: "Enable duplicate detection", description: "Search issues/PRs before filing (recommended)", recommended: true }, + { id: "skip", label: "Skip for now", description: "File without checking for duplicates" }, + ], + notYetMessage: "You can enable this later via preferences (forensics_dedup: true).", + }); + + if (choice === "enable") { + await writeForensicsDedupPref(ctx, true); + dedupEnabled = true; + } + } + + const dedupSection = dedupEnabled ? 
DEDUP_PROMPT_SECTION : ""; + ctx.ui.notify("Building forensic report...", "info"); const report = await buildForensicReport(basePath); @@ -117,6 +208,7 @@ export async function handleForensics( problemDescription, forensicData, gsdSourceDir, + dedupSection, }); ctx.ui.notify(`Forensic report saved: ${relative(basePath, savedPath)}`, "info"); diff --git a/src/resources/extensions/gsd/git-service.ts b/src/resources/extensions/gsd/git-service.ts index fe3eeca05..f63fb10ea 100644 --- a/src/resources/extensions/gsd/git-service.ts +++ b/src/resources/extensions/gsd/git-service.ts @@ -9,8 +9,8 @@ */ import { execFileSync, execSync } from "node:child_process"; -import { existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, writeFileSync } from "node:fs"; -import { join, relative } from "node:path"; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; import { gsdRoot } from "./paths.js"; import { GIT_NO_PROMPT_ENV } from "./git-constants.js"; import { loadEffectiveGSDPreferences } from "./preferences.js"; @@ -196,6 +196,10 @@ export const RUNTIME_EXCLUSION_PATHS: readonly string[] = [ ".gsd/completed-units.json", ".gsd/STATE.md", ".gsd/gsd.db", + ".gsd/gsd.db-shm", // SQLite WAL sidecar — always created alongside gsd.db (#2296) + ".gsd/gsd.db-wal", // SQLite WAL sidecar — always created alongside gsd.db (#2296) + ".gsd/journal/", // daily-rotated JSONL event journal (#2296) + ".gsd/doctor-history.jsonl", // doctor run history (#2296) ".gsd/DISCUSSION-MANIFEST.json", ]; @@ -245,7 +249,6 @@ export function writeIntegrationBranch( basePath: string, milestoneId: string, branch: string, - _options?: { commitDocs?: boolean }, ): void { // Don't record slice branches as the integration target if (SLICE_BRANCH_RE.test(branch)) return; @@ -486,80 +489,11 @@ export class GitServiceImpl { // git add -A already skips it and the exclusions are harmless no-ops. const allExclusions = [...RUNTIME_EXCLUSION_PATHS, ...extraExclusions]; nativeAddAllWithExclusions(this.basePath, allExclusions); - - // Force-add .gsd/milestones/ when .gsd is a symlink (#2104). - // When .gsd is a symlink (external state projects), ensureGitignore adds - // `.gsd` to .gitignore. The nativeAddAllWithExclusions call above falls - // back to plain `git add -A` (symlink pathspec rejection), which respects - // .gitignore and silently skips new .gsd/milestones/ files. - // - // `git add -f` also fails with "beyond a symbolic link", so we use - // `git hash-object -w` + `git update-index --add --cacheinfo` to bypass - // the symlink restriction entirely. This stages each milestone artifact - // individually by hashing the file content and updating the index directly. - const gsdPath = join(this.basePath, ".gsd"); - const milestonesDir = join(gsdPath, "milestones"); - try { - if ( - existsSync(gsdPath) && - lstatSync(gsdPath).isSymbolicLink() && - existsSync(milestonesDir) - ) { - this._forceAddMilestoneArtifacts(milestonesDir); - } - } catch { - // Non-fatal: if force-add fails, the commit proceeds without these files. - // This matches existing behavior where milestone artifacts were silently - // omitted — but now we at least attempt to include them. - } } /** Tracks whether runtime file cleanup has run this session. */ private _runtimeFilesCleanedUp = false; - /** - * Recursively collect all files under a directory. - * Returns paths relative to `basePath` (e.g. ".gsd/milestones/M009/SUMMARY.md"). 
- */ - private _collectFiles(dir: string): string[] { - const files: string[] = []; - for (const entry of readdirSync(dir, { withFileTypes: true })) { - const full = join(dir, entry.name); - if (entry.isDirectory()) { - files.push(...this._collectFiles(full)); - } else if (entry.isFile()) { - files.push(relative(this.basePath, full)); - } - } - return files; - } - - /** - * Stage milestone artifacts through a symlinked .gsd directory (#2104). - * - * `git add` (even with `-f`) refuses to stage files "beyond a symbolic link". - * This method bypasses that restriction by hashing each file with - * `git hash-object -w` and inserting the blob into the index with - * `git update-index --add --cacheinfo 100644 `. - */ - private _forceAddMilestoneArtifacts(milestonesDir: string): void { - const files = this._collectFiles(milestonesDir); - for (const filePath of files) { - const hash = execFileSync("git", ["hash-object", "-w", filePath], { - cwd: this.basePath, - stdio: ["ignore", "pipe", "pipe"], - encoding: "utf-8", - env: GIT_NO_PROMPT_ENV, - }).trim(); - execFileSync("git", ["update-index", "--add", "--cacheinfo", "100644", hash, filePath], { - cwd: this.basePath, - stdio: ["ignore", "pipe", "pipe"], - encoding: "utf-8", - env: GIT_NO_PROMPT_ENV, - }); - } - } - /** * Stage files (smart staging) and commit. * Returns the commit message string on success, or null if nothing to commit. diff --git a/src/resources/extensions/gsd/gitignore.ts b/src/resources/extensions/gsd/gitignore.ts index cb65f8c00..71cf7c2ab 100644 --- a/src/resources/extensions/gsd/gitignore.ts +++ b/src/resources/extensions/gsd/gitignore.ts @@ -29,6 +29,10 @@ const GSD_RUNTIME_PATTERNS = [ ".gsd/completed-units.json", ".gsd/STATE.md", ".gsd/gsd.db", + ".gsd/gsd.db-shm", // SQLite WAL sidecar — always created alongside gsd.db (#2296) + ".gsd/gsd.db-wal", // SQLite WAL sidecar — always created alongside gsd.db (#2296) + ".gsd/journal/", // daily-rotated JSONL event journal (#2296) + ".gsd/doctor-history.jsonl", // doctor run history (#2296) ".gsd/DISCUSSION-MANIFEST.json", ".gsd/milestones/**/*-CONTINUE.md", ".gsd/milestones/**/continue.md", @@ -137,7 +141,7 @@ export function hasGitTrackedGsdFiles(basePath: string): boolean { */ export function ensureGitignore( basePath: string, - options?: { manageGitignore?: boolean; commitDocs?: boolean }, + options?: { manageGitignore?: boolean }, ): boolean { // If manage_gitignore is explicitly false, do not touch .gitignore at all if (options?.manageGitignore === false) return false; diff --git a/src/resources/extensions/gsd/gsd-db.ts b/src/resources/extensions/gsd/gsd-db.ts index 898905202..1cdb8bf1d 100644 --- a/src/resources/extensions/gsd/gsd-db.ts +++ b/src/resources/extensions/gsd/gsd-db.ts @@ -623,7 +623,8 @@ function migrateSchema(db: DbAdapter): void { let currentDb: DbAdapter | null = null; let currentPath: string | null = null; -let currentPid = 0; +let currentPid: number = 0; +let _exitHandlerRegistered = false; export function getDbProvider(): ProviderName | null { loadProvider(); @@ -653,12 +654,25 @@ export function openDatabase(path: string): boolean { currentDb = adapter; currentPath = path; currentPid = process.pid; + + if (!_exitHandlerRegistered) { + _exitHandlerRegistered = true; + process.on("exit", () => { try { closeDatabase(); } catch {} }); + } + return true; } export function closeDatabase(): void { if (currentDb) { - try { currentDb.close(); } catch { /* swallow */ } + try { + currentDb.exec('PRAGMA wal_checkpoint(TRUNCATE)'); + } catch { /* non-fatal — best 
effort before close */ } + try { + currentDb.close(); + } catch { + // swallow close errors + } currentDb = null; currentPath = null; currentPid = 0; @@ -1455,6 +1469,8 @@ export function getArtifact(path: string): ArtifactRow | null { return rowToArtifact(row); } +// ─── Worktree DB Helpers ────────────────────────────────────────────────── + export function copyWorktreeDb(srcDbPath: string, destDbPath: string): boolean { try { if (!existsSync(srcDbPath)) return false; diff --git a/src/resources/extensions/gsd/native-git-bridge.ts b/src/resources/extensions/gsd/native-git-bridge.ts index dd6d7bae9..edfe81188 100644 --- a/src/resources/extensions/gsd/native-git-bridge.ts +++ b/src/resources/extensions/gsd/native-git-bridge.ts @@ -58,6 +58,8 @@ interface GitBatchInfo { interface GitMergeResult { success: boolean; conflicts: string[]; + /** Filenames extracted from git stderr when a dirty working tree blocks the merge (#2151). */ + dirtyFiles?: string[]; } // ─── Native Module Loading ────────────────────────────────────────────────── @@ -863,7 +865,16 @@ export function nativeMergeSquash(basePath: string, branch: string): GitMergeRes stderr.includes("not possible because you have unmerged files") || stderr.includes("overwritten by merge") ) { - return { success: false, conflicts: ["__dirty_working_tree__"] }; + // Extract filenames from git stderr so callers can report which files + // are dirty instead of generically blaming .gsd/ (#2151). + // Git lists them as tab-indented lines between the "would be overwritten" + // header and the "Please commit" footer. + const dirtyFiles = stderr + .split("\n") + .filter((line) => line.startsWith("\t")) + .map((line) => line.trim()) + .filter(Boolean); + return { success: false, conflicts: ["__dirty_working_tree__"], dirtyFiles }; } // Check for real content conflicts diff --git a/src/resources/extensions/gsd/parallel-orchestrator.ts b/src/resources/extensions/gsd/parallel-orchestrator.ts index 86aa480f7..d2b71be22 100644 --- a/src/resources/extensions/gsd/parallel-orchestrator.ts +++ b/src/resources/extensions/gsd/parallel-orchestrator.ts @@ -54,6 +54,7 @@ export interface WorkerInfo { state: "running" | "paused" | "stopped" | "error"; completedUnits: number; cost: number; + cleanup?: () => void; } export interface OrchestratorState { @@ -357,6 +358,16 @@ export async function startParallel( const config = resolveParallelConfig(prefs); + // Release any leftover state from a previous session before reassigning + if (state) { + for (const w of state.workers.values()) { + w.cleanup?.(); + w.cleanup = undefined; + w.process = null; + } + state.workers.clear(); + } + // Try to restore from a previous crash const restored = restoreState(basePath); if (restored && restored.workers.length > 0) { @@ -598,12 +609,26 @@ export function spawnWorker( worktreePath: worker.worktreePath, }); + // Store cleanup function to remove all listeners from the child process. + // This prevents listener accumulation when workers are respawned, since + // handler closures capture milestoneId and other data that would otherwise + // be retained indefinitely. 
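+    // cleanup is invoked on worker exit, in stopParallel, when a stale worker
+    // is detected in refreshWorkerStatuses, and in resetOrchestrator.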
+ worker.cleanup = () => { + child.stdout?.removeAllListeners(); + child.stderr?.removeAllListeners(); + child.removeAllListeners(); + }; + // Handle worker exit child.on("exit", (code) => { if (!state) return; const w = state.workers.get(milestoneId); if (!w) return; + // Remove all stream listeners to release closure references + w.cleanup?.(); + w.cleanup = undefined; + w.process = null; if (w.state === "stopped") return; // graceful stop, already handled @@ -795,6 +820,10 @@ export async function stopParallel( await waitForWorkerExit(worker, 250); } + // Remove stream listeners before releasing the process handle + worker.cleanup?.(); + worker.cleanup = undefined; + // Update in-memory state worker.state = "stopped"; worker.process = null; @@ -880,6 +909,8 @@ export function refreshWorkerStatuses( for (const mid of staleIds) { const worker = state.workers.get(mid); if (worker) { + worker.cleanup?.(); + worker.cleanup = undefined; worker.state = "error"; worker.process = null; } @@ -897,6 +928,8 @@ export function refreshWorkerStatuses( const diskStatus = statusMap.get(mid); if (!diskStatus) { if (!isPidAlive(worker.pid)) { + worker.cleanup?.(); + worker.cleanup = undefined; worker.state = worker.completedUnits > 0 ? "stopped" : "error"; worker.process = null; } @@ -938,5 +971,15 @@ export function isBudgetExceeded(): boolean { /** Reset orchestrator state. Called on clean shutdown. */ export function resetOrchestrator(): void { + if (state) { + // Explicitly release all WorkerInfo references and run any pending + // cleanup callbacks so child process stream closures are freed. + for (const w of state.workers.values()) { + w.cleanup?.(); + w.cleanup = undefined; + w.process = null; + } + state.workers.clear(); + } state = null; } diff --git a/src/resources/extensions/gsd/preferences-types.ts b/src/resources/extensions/gsd/preferences-types.ts index 36e6f83f5..b57e2514f 100644 --- a/src/resources/extensions/gsd/preferences-types.ts +++ b/src/resources/extensions/gsd/preferences-types.ts @@ -89,6 +89,8 @@ export const KNOWN_PREFERENCE_KEYS = new Set([ "reactive_execution", "github", "service_tier", + "forensics_dedup", + "show_token_cost", ]); /** Canonical list of all dispatch unit types. */ @@ -223,6 +225,10 @@ export interface GSDPreferences { github?: GitHubSyncConfig; /** OpenAI service tier preference. "priority" = 2x cost, faster. "flex" = 0.5x cost, slower. Only affects gpt-5.4 models. */ service_tier?: "priority" | "flex"; + /** Opt-in: search existing issues and PRs before filing from /gsd forensics. Uses additional AI tokens. */ + forensics_dedup?: boolean; + /** Opt-in: show per-prompt and cumulative session token cost in the footer. Default: false. 
*/ + show_token_cost?: boolean; } export interface LoadedGSDPreferences { diff --git a/src/resources/extensions/gsd/preferences-validation.ts b/src/resources/extensions/gsd/preferences-validation.ts index d19468a68..bc9fc17d8 100644 --- a/src/resources/extensions/gsd/preferences-validation.ts +++ b/src/resources/extensions/gsd/preferences-validation.ts @@ -747,5 +747,14 @@ export function validatePreferences(preferences: GSDPreferences): { } } + // ─── Show Token Cost ────────────────────────────────────────────── + if (preferences.show_token_cost !== undefined) { + if (typeof preferences.show_token_cost === "boolean") { + validated.show_token_cost = preferences.show_token_cost; + } else { + errors.push("show_token_cost must be a boolean"); + } + } + return { preferences: validated, errors, warnings }; } diff --git a/src/resources/extensions/gsd/preferences.ts b/src/resources/extensions/gsd/preferences.ts index e369525cc..99c91e370 100644 --- a/src/resources/extensions/gsd/preferences.ts +++ b/src/resources/extensions/gsd/preferences.ts @@ -200,12 +200,22 @@ function loadPreferencesFile(path: string, scope: "global" | "project"): LoadedG export function parsePreferencesMarkdown(content: string): GSDPreferences | null { // Use indexOf instead of [\s\S]*? regex to avoid backtracking (#468) const startMarker = content.startsWith('---\r\n') ? '---\r\n' : '---\n'; - if (!content.startsWith(startMarker)) return null; - const searchStart = startMarker.length; - const endIdx = content.indexOf('\n---', searchStart); - if (endIdx === -1) return null; - const block = content.slice(searchStart, endIdx); - return parseFrontmatterBlock(block.replace(/\r/g, '')); + if (content.startsWith(startMarker)) { + const searchStart = startMarker.length; + const endIdx = content.indexOf('\n---', searchStart); + if (endIdx === -1) return null; + const block = content.slice(searchStart, endIdx); + return parseFrontmatterBlock(block.replace(/\r/g, '')); + } + + // Fallback: heading+list format (e.g. "## Git\n- isolation: none") (#2036) + // GSD agents may write preferences files without frontmatter delimiters. + if (/^##\s+\w/m.test(content)) { + return parseHeadingListFormat(content); + } + + console.warn("[parsePreferencesMarkdown] preferences.md exists but uses an unrecognized format — skipping."); + return null; } function parseFrontmatterBlock(frontmatter: string): GSDPreferences { @@ -221,6 +231,51 @@ function parseFrontmatterBlock(frontmatter: string): GSDPreferences { } } +/** + * Parse heading+list format into a nested object, then cast to GSDPreferences. 
+ * Handles markdown like: + * ## Git + * - isolation: none + * - commit_docs: true + * ## Models + * - planner: sonnet + */ +function parseHeadingListFormat(content: string): GSDPreferences { + const result: Record> = {}; + let currentSection: string | null = null; + + for (const rawLine of content.split('\n')) { + const line = rawLine.replace(/\r$/, ''); + const headingMatch = line.match(/^##\s+(.+)$/); + if (headingMatch) { + currentSection = headingMatch[1].trim().toLowerCase().replace(/\s+/g, '_'); + continue; + } + if (currentSection) { + const itemMatch = line.match(/^-\s+([^:]+):\s*(.*)$/); + if (itemMatch) { + if (!result[currentSection]) result[currentSection] = {}; + const value = itemMatch[2].trim(); + // Coerce "true"/"false" strings and numbers + result[currentSection][itemMatch[1].trim()] = value; + } + } + } + + // Convert string values to appropriate types via YAML parser for each section + const typed: Record = {}; + for (const [section, entries] of Object.entries(result)) { + const yamlLines = Object.entries(entries).map(([k, v]) => `${k}: ${v}`).join('\n'); + try { + typed[section] = parseYaml(yamlLines); + } catch { + typed[section] = entries; + } + } + + return typed as GSDPreferences; +} + // ─── Merging ──────────────────────────────────────────────────────────────── /** @@ -286,6 +341,8 @@ function mergePreferences(base: GSDPreferences, override: GSDPreferences): GSDPr ? { ...(base.github ?? {}), ...(override.github ?? {}) } as import("../github-sync/types.js").GitHubSyncConfig : undefined, service_tier: override.service_tier ?? base.service_tier, + forensics_dedup: override.forensics_dedup ?? base.forensics_dedup, + show_token_cost: override.show_token_cost ?? base.show_token_cost, }; } diff --git a/src/resources/extensions/gsd/prompts/forensics.md b/src/resources/extensions/gsd/prompts/forensics.md index 4b3fc9cfe..bad2a126b 100644 --- a/src/resources/extensions/gsd/prompts/forensics.md +++ b/src/resources/extensions/gsd/prompts/forensics.md @@ -101,6 +101,8 @@ Explain your findings: - **Code snippet** — the problematic code and what it should do instead - **Recovery** — what the user can do right now to get unstuck +{{dedupSection}} + Then **offer GitHub issue creation**: "Would you like me to create a GitHub issue for this on gsd-build/gsd-2?" **CRITICAL: The `github_issues` tool ONLY targets the current user's repository — it has no `repo` parameter. You MUST use `gh issue create --repo gsd-build/gsd-2` via the `bash` tool to file on the correct repo. Do NOT use the `github_issues` tool for this.** diff --git a/src/resources/extensions/gsd/repo-identity.ts b/src/resources/extensions/gsd/repo-identity.ts index d3133c3d6..f3e350801 100644 --- a/src/resources/extensions/gsd/repo-identity.ts +++ b/src/resources/extensions/gsd/repo-identity.ts @@ -8,7 +8,7 @@ import { createHash } from "node:crypto"; import { execFileSync } from "node:child_process"; -import { existsSync, lstatSync, mkdirSync, readFileSync, realpathSync, rmSync, symlinkSync, writeFileSync } from "node:fs"; +import { existsSync, lstatSync, mkdirSync, readdirSync, readFileSync, realpathSync, rmSync, symlinkSync, writeFileSync } from "node:fs"; import { homedir } from "node:os"; import { basename, dirname, join, resolve } from "node:path"; @@ -271,15 +271,54 @@ export function externalProjectsRoot(): string { return join(base, "projects"); } +// ─── Numbered Variant Cleanup ──────────────────────────────────────────────── + +/** + * macOS collision pattern: `.gsd 2`, `.gsd 3`, `.gsd 4`, etc. 
+ * + * When `symlinkSync` (or Finder) tries to create `.gsd` but a real directory + * already exists at that path, macOS APFS silently renames the new entry to + * `.gsd 2`, then `.gsd 3`, and so on. These numbered variants confuse GSD + * because the canonical `.gsd` path no longer resolves to the external state + * directory, making tracked planning files appear deleted. + * + * This helper scans the project root for entries matching `.gsd ` and + * removes them. It is called early in `ensureGsdSymlink()` so that the + * canonical `.gsd` path is always the one in use. + */ +const GSD_NUMBERED_VARIANT_RE = /^\.gsd \d+$/; + +export function cleanNumberedGsdVariants(projectPath: string): string[] { + const removed: string[] = []; + try { + const entries = readdirSync(projectPath); + for (const entry of entries) { + if (GSD_NUMBERED_VARIANT_RE.test(entry)) { + const fullPath = join(projectPath, entry); + try { + rmSync(fullPath, { recursive: true, force: true }); + removed.push(entry); + } catch { + // Best-effort: if removal fails (e.g. permissions), continue with next + } + } + } + } catch { + // Non-fatal: readdir failure should not block symlink creation + } + return removed; +} + // ─── Symlink Management ───────────────────────────────────────────────────── /** * Ensure the `/.gsd` symlink points to the external state directory. * - * 1. mkdir -p the external dir - * 2. If `/.gsd` doesn't exist → create symlink - * 3. If `/.gsd` is already the correct symlink → no-op - * 4. If `/.gsd` is a real directory → return as-is (migration handles later) + * 1. Clean up any macOS numbered collision variants (`.gsd 2`, `.gsd 3`, etc.) + * 2. mkdir -p the external dir + * 3. If `/.gsd` doesn't exist → create symlink + * 4. If `/.gsd` is already the correct symlink → no-op + * 5. If `/.gsd` is a real directory → return as-is (migration handles later) * * Returns the resolved external path. */ @@ -297,6 +336,10 @@ export function ensureGsdSymlink(projectPath: string): string { return localGsd; } + // Clean up macOS numbered collision variants (.gsd 2, .gsd 3, etc.) before + // any existence checks — otherwise they accumulate and confuse state (#2205). 
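+  // e.g. a stray ".gsd 2" left by a blocked symlink attempt is removed here
+  // so the canonical .gsd entry can be created and used below.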
+ cleanNumberedGsdVariants(projectPath); + // Ensure external directory exists mkdirSync(externalPath, { recursive: true }); diff --git a/src/resources/extensions/gsd/service-tier.ts b/src/resources/extensions/gsd/service-tier.ts index 7e2f4613a..9ef836dc6 100644 --- a/src/resources/extensions/gsd/service-tier.ts +++ b/src/resources/extensions/gsd/service-tier.ts @@ -23,6 +23,8 @@ import { ensurePreferencesFile, serializePreferencesToFrontmatter } from "./comm export type ServiceTierSetting = "priority" | "flex" | undefined; +const SERVICE_TIER_SCOPE_NOTE = "Only affects gpt-5.4 models, regardless of provider."; + // ─── Gating ────────────────────────────────────────────────────────────────── /** @@ -51,7 +53,7 @@ export function formatServiceTierStatus(tier: ServiceTierSetting): string { " /gsd fast flex Set to flex (0.5x cost, slower)", " /gsd fast off Disable service tier", "", - "Only affects gpt-5.4 models.", + SERVICE_TIER_SCOPE_NOTE, ].join("\n"); } @@ -64,10 +66,18 @@ export function formatServiceTierStatus(tier: ServiceTierSetting): string { " /gsd fast flex Set to flex (0.5x cost, slower)", " /gsd fast off Disable service tier", "", - "Only affects gpt-5.4 models.", + SERVICE_TIER_SCOPE_NOTE, ].join("\n"); } +export function formatServiceTierFooterStatus( + tier: ServiceTierSetting, + modelId: string | undefined, +): string | undefined { + if (!tier || !modelId || !supportsServiceTier(modelId)) return undefined; + return tier === "priority" ? "fast: ⚡ priority" : "fast: 💰 flex"; +} + // ─── Icon Resolution ───────────────────────────────────────────────────────── /** @@ -148,19 +158,22 @@ export async function handleFast(args: string, ctx: ExtensionCommandContext): Pr if (trimmed === "on") { await writeGlobalServiceTier(ctx, "priority"); - ctx.ui.notify("Service tier set to priority (2x cost, faster responses). Only affects gpt-5.4 models.", "info"); + ctx.ui.setStatus("gsd-fast", formatServiceTierFooterStatus("priority", ctx.model?.id)); + ctx.ui.notify("Service tier set to priority (2x cost, faster responses). Only affects gpt-5.4 models, regardless of provider.", "info"); return; } if (trimmed === "off") { await writeGlobalServiceTier(ctx, undefined); + ctx.ui.setStatus("gsd-fast", undefined); ctx.ui.notify("Service tier disabled.", "info"); return; } if (trimmed === "flex") { await writeGlobalServiceTier(ctx, "flex"); - ctx.ui.notify("Service tier set to flex (0.5x cost, slower responses). Only affects gpt-5.4 models.", "info"); + ctx.ui.setStatus("gsd-fast", formatServiceTierFooterStatus("flex", ctx.model?.id)); + ctx.ui.notify("Service tier set to flex (0.5x cost, slower responses). 
Only affects gpt-5.4 models, regardless of provider.", "info"); return; } diff --git a/src/resources/extensions/gsd/session-lock.ts b/src/resources/extensions/gsd/session-lock.ts index eb9ea9fcc..dc19f86c4 100644 --- a/src/resources/extensions/gsd/session-lock.ts +++ b/src/resources/extensions/gsd/session-lock.ts @@ -239,7 +239,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { const elapsed = Date.now() - _lockAcquiredAt; if (elapsed < 1_800_000) { process.stderr.write( - `[gsd] Lock heartbeat mismatch after ${Math.round(elapsed / 1000)}s — event loop stall, continuing.\n`, + `[gsd] Lock heartbeat caught up after ${Math.round(elapsed / 1000)}s — long LLM call, no action needed.\n`, ); return; // Suppress false positive } @@ -299,7 +299,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult { const elapsed = Date.now() - _lockAcquiredAt; if (elapsed < 1_800_000) { process.stderr.write( - `[gsd] Lock heartbeat mismatch after ${Math.round(elapsed / 1000)}s — event loop stall, continuing.\n`, + `[gsd] Lock heartbeat caught up after ${Math.round(elapsed / 1000)}s — long LLM call, no action needed.\n`, ); return; } diff --git a/src/resources/extensions/gsd/tests/activity-log.test.ts b/src/resources/extensions/gsd/tests/activity-log.test.ts index 423701723..8ae1bba4b 100644 --- a/src/resources/extensions/gsd/tests/activity-log.test.ts +++ b/src/resources/extensions/gsd/tests/activity-log.test.ts @@ -4,7 +4,7 @@ * - activity-log-save.test.ts (caching, dedup, collision recovery) */ -import test from "node:test"; +import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; import { existsSync, mkdtempSync, mkdirSync, readdirSync, realpathSync, rmSync, utimesSync, writeFileSync, readFileSync } from "node:fs"; import { join, dirname } from "node:path"; @@ -48,9 +48,12 @@ function createCtx(entries: unknown[]) { // ── Pruning ────────────────────────────────────────────────────────────────── -test("pruneActivityLogs deletes old files, keeps recent and highest-seq", () => { - const dir = createTmpDir(); - try { +describe("pruneActivityLogs", () => { + let dir: string; + beforeEach(() => { dir = createTmpDir(); }); + afterEach(() => { rmSync(dir, { recursive: true, force: true }); }); + + test("deletes old files, keeps recent and highest-seq", () => { const f001 = writeActivityFile(dir, "001", "execute-task-M001-S01-T01"); writeActivityFile(dir, "002", "execute-task-M001-S01-T02"); writeActivityFile(dir, "003", "execute-task-M001-S01-T03"); @@ -61,14 +64,9 @@ test("pruneActivityLogs deletes old files, keeps recent and highest-seq", () => assert.ok(!remaining.includes("001-execute-task-M001-S01-T01.jsonl")); assert.ok(remaining.includes("002-execute-task-M001-S01-T02.jsonl")); assert.ok(remaining.includes("003-execute-task-M001-S01-T03.jsonl")); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("pruneActivityLogs preserves highest-seq even when all files are old", () => { - const dir = createTmpDir(); - try { + test("preserves highest-seq even when all files are old", () => { const f001 = writeActivityFile(dir, "001", "t1"); const f002 = writeActivityFile(dir, "002", "t2"); const f003 = writeActivityFile(dir, "003", "t3"); @@ -78,14 +76,9 @@ test("pruneActivityLogs preserves highest-seq even when all files are old", () = const remaining = listFiles(dir); assert.equal(remaining.length, 1); assert.ok(remaining[0].startsWith("003-")); - } finally { - rmSync(dir, { recursive: 
true, force: true }); - } -}); + }); -test("pruneActivityLogs with retentionDays=0 keeps only highest-seq", () => { - const dir = createTmpDir(); - try { + test("with retentionDays=0 keeps only highest-seq", () => { writeActivityFile(dir, "001", "t1"); writeActivityFile(dir, "002", "t2"); writeActivityFile(dir, "003", "t3"); @@ -94,51 +87,31 @@ test("pruneActivityLogs with retentionDays=0 keeps only highest-seq", () => { const remaining = listFiles(dir); assert.equal(remaining.length, 1); assert.ok(remaining[0].startsWith("003-")); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("pruneActivityLogs no-op when all files are recent", () => { - const dir = createTmpDir(); - try { + test("no-op when all files are recent", () => { writeActivityFile(dir, "001", "t1"); writeActivityFile(dir, "002", "t2"); writeActivityFile(dir, "003", "t3"); pruneActivityLogs(dir, 30); assert.equal(listFiles(dir).length, 3); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("pruneActivityLogs handles empty directory", () => { - const dir = createTmpDir(); - try { + test("handles empty directory", () => { assert.doesNotThrow(() => pruneActivityLogs(dir, 30)); assert.equal(readdirSync(dir).length, 0); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("pruneActivityLogs preserves single old file (it is highest-seq)", () => { - const dir = createTmpDir(); - try { + test("preserves single old file (it is highest-seq)", () => { const f = writeActivityFile(dir, "001", "t1"); backdateFile(f, 100); pruneActivityLogs(dir, 30); assert.equal(listFiles(dir).length, 1); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("pruneActivityLogs ignores non-matching filenames", () => { - const dir = createTmpDir(); - try { + test("ignores non-matching filenames", () => { const f001 = writeActivityFile(dir, "001", "t1"); writeFileSync(join(dir, "notes.txt"), "some notes\n", "utf-8"); backdateFile(f001, 40); @@ -148,16 +121,17 @@ test("pruneActivityLogs ignores non-matching filenames", () => { assert.ok(remaining.includes("notes.txt")); // 001 is the only seq file, so it's highest-seq and survives assert.ok(remaining.includes("001-t1.jsonl")); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + }); }); // ── Save: caching, dedup, collision recovery ───────────────────────────────── -test("saveActivityLog caches sequence instead of rescanning", () => { - const baseDir = createTmpDir(); - try { +describe("saveActivityLog", () => { + let baseDir: string; + beforeEach(() => { baseDir = createTmpDir(); }); + afterEach(() => { rmSync(baseDir, { recursive: true, force: true }); }); + + test("caches sequence instead of rescanning", () => { saveActivityLog(createCtx([{ kind: "first", n: 1 }]) as any, baseDir, "execute-task", "M001/S01/T01"); writeFileSync(join(activityDir(baseDir), "999-external.jsonl"), '{"x":1}\n', "utf-8"); saveActivityLog(createCtx([{ kind: "second", n: 2 }]) as any, baseDir, "execute-task", "M001/S01/T02"); @@ -166,14 +140,9 @@ test("saveActivityLog caches sequence instead of rescanning", () => { assert.ok(files.includes("001-execute-task-M001-S01-T01.jsonl")); assert.ok(files.includes("002-execute-task-M001-S01-T02.jsonl")); assert.ok(!files.some(f => f.startsWith("1000-"))); - } finally { - rmSync(baseDir, { recursive: true, force: true }); - } -}); + }); -test("saveActivityLog deduplicates identical snapshots for same unit", () => { - const baseDir = createTmpDir(); - 
try { + test("deduplicates identical snapshots for same unit", () => { const ctx = createCtx([{ role: "assistant", content: "same" }]); saveActivityLog(ctx as any, baseDir, "plan-slice", "M002/S01"); saveActivityLog(ctx as any, baseDir, "plan-slice", "M002/S01"); @@ -184,14 +153,9 @@ test("saveActivityLog deduplicates identical snapshots for same unit", () => { saveActivityLog(createCtx([{ role: "assistant", content: "changed" }]) as any, baseDir, "plan-slice", "M002/S01"); files = listFiles(activityDir(baseDir)); assert.equal(files.length, 2); - } finally { - rmSync(baseDir, { recursive: true, force: true }); - } -}); + }); -test("saveActivityLog recovers on sequence collision", () => { - const baseDir = createTmpDir(); - try { + test("recovers on sequence collision", () => { saveActivityLog(createCtx([{ turn: 1 }]) as any, baseDir, "execute-task", "M003/S02/T01"); writeFileSync(join(activityDir(baseDir), "002-execute-task-M003-S02-T02.jsonl"), '{"collision":true}\n', "utf-8"); saveActivityLog(createCtx([{ turn: 2 }]) as any, baseDir, "execute-task", "M003/S02/T02"); @@ -199,9 +163,7 @@ test("saveActivityLog recovers on sequence collision", () => { const files = listFiles(activityDir(baseDir)); assert.ok(files.includes("002-execute-task-M003-S02-T02.jsonl")); assert.ok(files.includes("003-execute-task-M003-S02-T02.jsonl")); - } finally { - rmSync(baseDir, { recursive: true, force: true }); - } + }); }); // ── Prompt text assertion ──────────────────────────────────────────────────── diff --git a/src/resources/extensions/gsd/tests/auto-stash-merge.test.ts b/src/resources/extensions/gsd/tests/auto-stash-merge.test.ts new file mode 100644 index 000000000..403caf396 --- /dev/null +++ b/src/resources/extensions/gsd/tests/auto-stash-merge.test.ts @@ -0,0 +1,121 @@ +/** + * auto-stash-merge.test.ts — Regression tests for #2151. + * + * Tests that mergeMilestoneToMain auto-stashes dirty files before squash merge, + * and that nativeMergeSquash returns dirty filenames from git stderr. 
+ */ + +import test from "node:test"; +import assert from "node:assert/strict"; +import { mkdtempSync, mkdirSync, writeFileSync, rmSync, existsSync, readFileSync, realpathSync } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; +import { execSync } from "node:child_process"; + +import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts"; +import { nativeMergeSquash } from "../native-git-bridge.ts"; + +function run(cmd: string, cwd: string): string { + return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); +} + +function createTempRepo(): string { + const dir = realpathSync(mkdtempSync(join(tmpdir(), "wt-autostash-test-"))); + run("git init", dir); + run("git config user.email test@test.com", dir); + run("git config user.name Test", dir); + writeFileSync(join(dir, "README.md"), "# test\n"); + mkdirSync(join(dir, ".gsd"), { recursive: true }); + writeFileSync(join(dir, ".gsd", "STATE.md"), "# State\n"); + run("git add .", dir); + run("git commit -m init", dir); + run("git branch -M main", dir); + return dir; +} + +function makeRoadmap(milestoneId: string, title: string, slices: Array<{ id: string; title: string }>): string { + const sliceLines = slices.map(s => `- [x] **${s.id}: ${s.title}**`).join("\n"); + return `# ${milestoneId}: ${title}\n\n## Slices\n${sliceLines}\n`; +} + +function addSliceToMilestone( + repo: string, wtPath: string, milestoneId: string, + sliceId: string, sliceTitle: string, + commits: Array<{ file: string; content: string; message: string }>, +): void { + const normalizedPath = wtPath.replaceAll("\\", "/"); + const worktreeName = normalizedPath.split("/").pop() || milestoneId; + const sliceBranch = `slice/${worktreeName}/${sliceId}`; + run(`git checkout -b "${sliceBranch}"`, wtPath); + for (const c of commits) { + writeFileSync(join(wtPath, c.file), c.content); + run("git add .", wtPath); + run(`git commit -m "${c.message}"`, wtPath); + } + const milestoneBranch = `milestone/${milestoneId}`; + run(`git checkout "${milestoneBranch}"`, wtPath); + run(`git merge --no-ff "${sliceBranch}" -m "merge ${sliceId}: ${sliceTitle}"`, wtPath); +} + +test("#2151 bug 1: auto-stash unblocks merge when unrelated files are dirty", () => { + const repo = createTempRepo(); + try { + const wtPath = createAutoWorktree(repo, "M200"); + + addSliceToMilestone(repo, wtPath, "M200", "S01", "Stash test", [ + { file: "stash-test.ts", content: "export const stash = true;\n", message: "add stash test" }, + ]); + + // Dirty an unrelated tracked file in the project root — this previously + // blocked the squash merge with "local changes would be overwritten". + writeFileSync(join(repo, "README.md"), "# modified locally\n"); + + const roadmap = makeRoadmap("M200", "Auto-stash test", [ + { id: "S01", title: "Stash test" }, + ]); + + // Should succeed — the dirty README.md is auto-stashed before merge. + const result = mergeMilestoneToMain(repo, "M200", roadmap); + assert.ok(result.commitMessage.includes("feat(M200)"), "merge succeeds with dirty unrelated file"); + assert.ok(existsSync(join(repo, "stash-test.ts")), "milestone code merged to main"); + + // Verify the dirty file was restored (stash popped). 
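+    // The stash is popped only after the squash commit (step 9a-ii), so the
+    // local edit reappears untouched once the merge completes.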
+ const readmeContent = readFileSync(join(repo, "README.md"), "utf-8"); + assert.equal(readmeContent, "# modified locally\n", "stash popped — dirty file restored after merge"); + } finally { + rmSync(repo, { recursive: true, force: true }); + } +}); + +test("#2151 bug 2: nativeMergeSquash returns dirty filenames", async () => { + const { nativeMergeSquash } = await import("../native-git-bridge.ts"); + const repo = createTempRepo(); + try { + run("git checkout -b milestone/M210", repo); + writeFileSync(join(repo, "overlap.ts"), "export const overlap = true;\n"); + run("git add .", repo); + run('git commit -m "add overlap"', repo); + run("git checkout main", repo); + + // Create the same file as a dirty local change + writeFileSync(join(repo, "overlap.ts"), "// local dirty version\n"); + + const result = nativeMergeSquash(repo, "milestone/M210"); + assert.equal(result.success, false, "merge reports failure"); + assert.ok( + result.conflicts.includes("__dirty_working_tree__"), + "conflicts include __dirty_working_tree__ sentinel", + ); + assert.ok( + Array.isArray(result.dirtyFiles) && result.dirtyFiles.length > 0, + "dirtyFiles array is populated", + ); + assert.ok( + result.dirtyFiles!.includes("overlap.ts"), + "dirtyFiles includes the actual dirty file name", + ); + } finally { + run("git checkout -- . 2>/dev/null || true", repo); + rmSync(repo, { recursive: true, force: true }); + } +}); diff --git a/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts b/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts index a2bb897f6..0a24524df 100644 --- a/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts +++ b/src/resources/extensions/gsd/tests/auto-worktree-milestone-merge.test.ts @@ -463,8 +463,11 @@ async function main(): Promise { assertTrue(existsSync(join(repo, "sync-test.ts")), "sync-test.ts on main after merge"); } - // ─── Test 11: #1738 Bug 1+2 — dirty tree merge preserves branch end-to-end ── - console.log("\n=== #1738 e2e: dirty tree rejection preserves branch ==="); + // ─── Test 11: #1738 Bug 1+2 → #2151: dirty tree auto-stashed, merge succeeds ── + // Before #2151, a conflicting dirty file in the project root would cause + // the squash merge to reject. Now auto-stash moves it out of the way, + // the merge succeeds, and the user's local file goes to the stash. + console.log("\n=== #2151: dirty tree auto-stashed, merge succeeds ==="); { const repo = freshRepo(); const wtPath = createAutoWorktree(repo, "M100"); @@ -473,31 +476,21 @@ async function main(): Promise { { file: "e2e.ts", content: "export const e2e = true;\n", message: "add e2e" }, ]); + // Create a conflicting local file — previously blocked the merge. writeFileSync(join(repo, "e2e.ts"), "// conflicting local file\n"); const roadmap = makeRoadmap("M100", "E2E dirty tree", [ { id: "S01", title: "E2E test" }, ]); - let threw = false; - let errorMsg = ""; - try { - mergeMilestoneToMain(repo, "M100", roadmap); - } catch (err: unknown) { - threw = true; - errorMsg = err instanceof Error ? err.message : String(err); - } - assertTrue(threw, "#1738 e2e: throws on dirty working tree"); - assertTrue( - errorMsg.includes("dirty") || errorMsg.includes("untracked") || errorMsg.includes("overwritten"), - "#1738 e2e: error identifies dirty tree cause", - ); + // With auto-stash (#2151), the merge should succeed. 
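+    // The dirty local e2e.ts is stashed out of the way; main should end up
+    // with the milestone branch's version of the file.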
+ const result = mergeMilestoneToMain(repo, "M100", roadmap); + assertTrue(result.commitMessage.includes("feat(M100)"), "#2151: merge succeeds after auto-stash"); - const branches = run("git branch", repo); - assertTrue( - branches.includes("milestone/M100"), - "#1738 e2e: milestone branch preserved on dirty tree rejection", - ); + // The milestone code should be on main. + assertTrue(existsSync(join(repo, "e2e.ts")), "#2151: e2e.ts merged to main"); + const content = readFileSync(join(repo, "e2e.ts"), "utf-8"); + assertEq(content, "export const e2e = true;\n", "#2151: merged content is from milestone branch"); } // ─── Test 12: Throw on unanchored code changes after empty commit (#1792) ─ @@ -771,6 +764,8 @@ async function main(): Promise { assertTrue(existsSync(join(repo, "real-code.ts")), "real-code.ts merged to main"); } + // Tests 20 and 21 for #2151 are in auto-stash-merge.test.ts (node:test format). + } finally { process.chdir(savedCwd); for (const d of tempDirs) { diff --git a/src/resources/extensions/gsd/tests/derive-state-db.test.ts b/src/resources/extensions/gsd/tests/derive-state-db.test.ts index ab59d0325..8654526fa 100644 --- a/src/resources/extensions/gsd/tests/derive-state-db.test.ts +++ b/src/resources/extensions/gsd/tests/derive-state-db.test.ts @@ -745,6 +745,7 @@ async function main(): Promise { "UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid", ).run({ ":ts": new Date().toISOString(), ":mid": "M001", ":sid": "S01" }); + invalidateStateCache(); const dbState = await deriveStateFromDb(base); @@ -786,7 +787,9 @@ async function main(): Promise { const elapsed = performance.now() - start; console.log(` deriveStateFromDb() took ${elapsed.toFixed(3)}ms`); - assertTrue(elapsed < 1, `perf-db: deriveStateFromDb() <1ms (got ${elapsed.toFixed(3)}ms)`); + // Use 10ms threshold — catches real regressions without flaking on + // CI runners under load (1ms threshold failed at 1.050ms on GitHub Actions) + assertTrue(elapsed < 10, `perf-db: deriveStateFromDb() <10ms (got ${elapsed.toFixed(3)}ms)`); closeDatabase(); } finally { diff --git a/src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts b/src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts new file mode 100644 index 000000000..0a26e0dd2 --- /dev/null +++ b/src/resources/extensions/gsd/tests/doctor-environment-worktree.test.ts @@ -0,0 +1,175 @@ +/** + * doctor-environment-worktree.test.ts — Worktree-aware dependency checks (#2303). + * + * Reproduction: doctor-environment `checkDependenciesInstalled` falsely reports + * `env_dependencies` error inside auto-worktrees because `node_modules` is + * absent by design (worktrees symlink to the project root's node_modules and + * the symlink may not yet exist at check time). + * + * Fix: when the basePath contains `.gsd/worktrees/`, resolve the project root + * and check its node_modules instead. + */ + +import { mkdtempSync, mkdirSync, writeFileSync, rmSync, symlinkSync } from "node:fs"; +import { join, dirname } from "node:path"; +import { tmpdir } from "node:os"; + +import { + runEnvironmentChecks, + environmentResultsToDoctorIssues, + checkEnvironmentHealth, +} from "../doctor-environment.ts"; +import { createTestContext } from "./test-helpers.ts"; + +const { assertEq, assertTrue, report } = createTestContext(); + +/** Create a directory tree with files. 
*/ +function createDir(files: Record = {}): string { + const dir = mkdtempSync(join(tmpdir(), "gsd-wt-env-")); + for (const [name, content] of Object.entries(files)) { + const filePath = join(dir, name); + mkdirSync(dirname(filePath), { recursive: true }); + writeFileSync(filePath, content); + } + return dir; +} + +async function main(): Promise { + const cleanups: string[] = []; + + try { + // ── Reproduction: worktree path without node_modules ─────────────── + console.log("\n=== worktree: missing node_modules should NOT error when project root has them ==="); + { + // Simulate project root with node_modules + const projectRoot = createDir({ + "package.json": JSON.stringify({ name: "test-project" }), + }); + mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); + cleanups.push(projectRoot); + + // Simulate a worktree inside .gsd/worktrees// + const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-abc"); + mkdirSync(worktreeDir, { recursive: true }); + writeFileSync( + join(worktreeDir, "package.json"), + JSON.stringify({ name: "test-project" }), + ); + // node_modules intentionally absent — this is the bug scenario + + const results = runEnvironmentChecks(worktreeDir); + const depsCheck = results.find(r => r.name === "dependencies"); + + // Before fix: this would return status "error" with "node_modules missing" + // After fix: should return "ok" because project root has node_modules + assertTrue( + depsCheck === undefined || depsCheck.status !== "error", + "worktree should not report env_dependencies error when project root has node_modules", + ); + } + + // ── Worktree with NO node_modules anywhere should still error ────── + console.log("\n=== worktree: missing node_modules everywhere should still error ==="); + { + const projectRoot = createDir({ + "package.json": JSON.stringify({ name: "test-project" }), + }); + cleanups.push(projectRoot); + // No node_modules at project root either + + const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-xyz"); + mkdirSync(worktreeDir, { recursive: true }); + writeFileSync( + join(worktreeDir, "package.json"), + JSON.stringify({ name: "test-project" }), + ); + + const results = runEnvironmentChecks(worktreeDir); + const depsCheck = results.find(r => r.name === "dependencies"); + assertTrue(depsCheck !== undefined, "dependencies check still runs in worktree"); + assertEq(depsCheck!.status, "error", "reports error when node_modules missing everywhere"); + } + + // ── Worktree env_dependencies not in doctor issues ────────────────── + console.log("\n=== worktree: checkEnvironmentHealth should not add env_dependencies for valid worktree ==="); + { + const projectRoot = createDir({ + "package.json": JSON.stringify({ name: "test-project" }), + }); + mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); + cleanups.push(projectRoot); + + const worktreeDir = join(projectRoot, ".gsd", "worktrees", "slice-pr"); + mkdirSync(worktreeDir, { recursive: true }); + writeFileSync( + join(worktreeDir, "package.json"), + JSON.stringify({ name: "test-project" }), + ); + + const issues: any[] = []; + await checkEnvironmentHealth(worktreeDir, issues); + const depIssue = issues.find(i => i.code === "env_dependencies"); + assertEq( + depIssue, + undefined, + "no env_dependencies issue for worktree with project root node_modules", + ); + } + + // ── Non-worktree path still catches missing node_modules ─────────── + console.log("\n=== non-worktree: missing node_modules still detected ==="); + { + const dir = createDir({ + 
"package.json": JSON.stringify({ name: "test" }), + }); + cleanups.push(dir); + const results = runEnvironmentChecks(dir); + const depsCheck = results.find(r => r.name === "dependencies"); + assertTrue(depsCheck !== undefined, "dependencies check runs"); + assertEq(depsCheck!.status, "error", "missing node_modules is an error for non-worktree"); + } + + // ── GSD_WORKTREE env var detection ───────────────────────────────── + console.log("\n=== GSD_WORKTREE env: should resolve project root node_modules ==="); + { + const projectRoot = createDir({ + "package.json": JSON.stringify({ name: "test-project" }), + }); + mkdirSync(join(projectRoot, "node_modules"), { recursive: true }); + cleanups.push(projectRoot); + + // Create a directory that doesn't have .gsd/worktrees in path but + // has GSD_WORKTREE env pointing to project root + const someDir = createDir({ + "package.json": JSON.stringify({ name: "test-project" }), + }); + cleanups.push(someDir); + + const origEnv = process.env.GSD_WORKTREE; + try { + process.env.GSD_WORKTREE = projectRoot; + const results = runEnvironmentChecks(someDir); + const depsCheck = results.find(r => r.name === "dependencies"); + assertTrue( + depsCheck === undefined || depsCheck.status !== "error", + "GSD_WORKTREE env allows fallback to project root node_modules", + ); + } finally { + if (origEnv === undefined) { + delete process.env.GSD_WORKTREE; + } else { + process.env.GSD_WORKTREE = origEnv; + } + } + } + + } finally { + for (const dir of cleanups) { + try { rmSync(dir, { recursive: true, force: true }); } catch { /* ignore */ } + } + } + + report(); +} + +main(); diff --git a/src/resources/extensions/gsd/tests/forensics-dedup.test.ts b/src/resources/extensions/gsd/tests/forensics-dedup.test.ts new file mode 100644 index 000000000..b08bd95a2 --- /dev/null +++ b/src/resources/extensions/gsd/tests/forensics-dedup.test.ts @@ -0,0 +1,48 @@ +import { describe, it } from "node:test"; +import assert from "node:assert/strict"; +import { readFileSync } from "node:fs"; +import { join, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const gsdDir = join(__dirname, ".."); + +describe("forensics dedup (#2096)", () => { + it("forensics_dedup is in KNOWN_PREFERENCE_KEYS", () => { + const source = readFileSync(join(gsdDir, "preferences-types.ts"), "utf-8"); + assert.ok(source.includes('"forensics_dedup"'), + "KNOWN_PREFERENCE_KEYS must contain forensics_dedup"); + assert.ok(source.includes("forensics_dedup?: boolean"), + "GSDPreferences must declare forensics_dedup as optional boolean"); + }); + + it("forensics prompt contains {{dedupSection}} placeholder", () => { + const prompt = readFileSync(join(gsdDir, "prompts", "forensics.md"), "utf-8"); + assert.ok(prompt.includes("{{dedupSection}}"), + "forensics.md must contain {{dedupSection}} placeholder"); + }); + + it("DEDUP_PROMPT_SECTION contains required search commands", async () => { + const source = readFileSync(join(gsdDir, "forensics.ts"), "utf-8"); + assert.ok(source.includes("DEDUP_PROMPT_SECTION"), "forensics.ts must define DEDUP_PROMPT_SECTION"); + assert.ok(source.includes("gh issue list --repo gsd-build/gsd-2 --state closed")); + assert.ok(source.includes("gh pr list --repo gsd-build/gsd-2 --state open")); + assert.ok(source.includes("gh pr list --repo gsd-build/gsd-2 --state merged")); + }); + + it("handleForensics checks forensics_dedup preference", () => { + const source = readFileSync(join(gsdDir, "forensics.ts"), "utf-8"); + 
assert.ok(source.includes("forensics_dedup"), + "handleForensics must reference forensics_dedup preference"); + assert.ok(source.includes("dedupSection"), + "handleForensics must pass dedupSection to loadPrompt"); + }); + + it("first-time opt-in shows when preference is undefined", () => { + const source = readFileSync(join(gsdDir, "forensics.ts"), "utf-8"); + assert.ok(source.includes("=== undefined"), + "first-time detection must check for undefined (not false)"); + assert.ok(source.includes("Duplicate detection available") || source.includes("duplicate detection"), + "opt-in notice must mention duplicate detection"); + }); +}); diff --git a/src/resources/extensions/gsd/tests/freeform-decisions.test.ts b/src/resources/extensions/gsd/tests/freeform-decisions.test.ts new file mode 100644 index 000000000..6a9addb44 --- /dev/null +++ b/src/resources/extensions/gsd/tests/freeform-decisions.test.ts @@ -0,0 +1,240 @@ +import { createTestContext } from './test-helpers.ts'; +import * as path from 'node:path'; +import * as os from 'node:os'; +import * as fs from 'node:fs'; +import { + openDatabase, + closeDatabase, +} from '../gsd-db.ts'; +import { + parseDecisionsTable, +} from '../md-importer.ts'; +import { + saveDecisionToDb, +} from '../db-writer.ts'; + +const { assertEq, assertTrue, report } = createTestContext(); + +// ═══════════════════════════════════════════════════════════════════════════ +// Helpers +// ═══════════════════════════════════════════════════════════════════════════ + +function makeTmpDir(): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-freeform-')); + fs.mkdirSync(path.join(dir, '.gsd'), { recursive: true }); + return dir; +} + +function cleanupDir(dir: string): void { + try { + fs.rmSync(dir, { recursive: true, force: true }); + } catch { /* swallow */ } +} + +// ═══════════════════════════════════════════════════════════════════════════ +// Bug reproduction: freeform DECISIONS.md content destroyed (#2301) +// ═══════════════════════════════════════════════════════════════════════════ + +console.log('\n── parseDecisionsTable silently drops freeform content ──'); + +{ + const freeform = `# Project Decisions + +## Architecture +We decided to use a microservices architecture because monoliths don't scale. + +## Database +PostgreSQL was chosen for its reliability and JSONB support. + +## Deployment +- Kubernetes for orchestration +- Helm charts for packaging +`; + + const parsed = parseDecisionsTable(freeform); + assertEq(parsed.length, 0, 'freeform content yields zero parsed decisions (expected — it is not a table)'); +} + +console.log('\n── saveDecisionToDb destroys freeform DECISIONS.md content ──'); + +{ + const tmpDir = makeTmpDir(); + const dbPath = path.join(tmpDir, '.gsd', 'gsd.db'); + const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + openDatabase(dbPath); + + const freeformContent = `# Project Decisions + +## Architecture +We decided to use a microservices architecture because monoliths don't scale. + +## Database +PostgreSQL was chosen for its reliability and JSONB support. 
+ +## Deployment +- Kubernetes for orchestration +- Helm charts for packaging +`; + + // Pre-populate DECISIONS.md with freeform content + fs.writeFileSync(mdPath, freeformContent, 'utf-8'); + + try { + // Save a new decision — this should NOT destroy the freeform content + const result = await saveDecisionToDb({ + scope: 'testing', + decision: 'Use Jest for unit tests', + choice: 'Jest', + rationale: 'Well-known, good DX', + when_context: 'M001', + }, tmpDir); + + assertEq(result.id, 'D001', 'decision ID assigned correctly'); + + // Read back the file + const afterContent = fs.readFileSync(mdPath, 'utf-8'); + + // The freeform content MUST still be present + assertTrue( + afterContent.includes('microservices architecture'), + 'freeform architecture section preserved after saveDecisionToDb', + ); + assertTrue( + afterContent.includes('PostgreSQL was chosen'), + 'freeform database section preserved after saveDecisionToDb', + ); + assertTrue( + afterContent.includes('Kubernetes for orchestration'), + 'freeform deployment section preserved after saveDecisionToDb', + ); + + // The new decision MUST also be present + assertTrue( + afterContent.includes('D001'), + 'new decision D001 present in file', + ); + assertTrue( + afterContent.includes('Use Jest for unit tests'), + 'new decision text present in file', + ); + + // Save a second decision — freeform content must still survive + const result2 = await saveDecisionToDb({ + scope: 'ci', + decision: 'Use GitHub Actions for CI', + choice: 'GitHub Actions', + rationale: 'Native integration', + when_context: 'M001', + }, tmpDir); + + assertEq(result2.id, 'D002', 'second decision ID assigned correctly'); + + const afterContent2 = fs.readFileSync(mdPath, 'utf-8'); + + assertTrue( + afterContent2.includes('microservices architecture'), + 'freeform content still preserved after second save', + ); + assertTrue( + afterContent2.includes('D001'), + 'first decision still present after second save', + ); + assertTrue( + afterContent2.includes('D002'), + 'second decision present after second save', + ); + assertTrue( + afterContent2.includes('Use GitHub Actions for CI'), + 'second decision text present in file', + ); + } finally { + closeDatabase(); + cleanupDir(tmpDir); + } +} + +console.log('\n── saveDecisionToDb with table-format DECISIONS.md still regenerates normally ──'); + +{ + const tmpDir = makeTmpDir(); + const dbPath = path.join(tmpDir, '.gsd', 'gsd.db'); + const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + openDatabase(dbPath); + + // Pre-populate with canonical table format + const tableContent = `# Decisions Register + + + +| # | When | Scope | Decision | Choice | Rationale | Revisable? | Made By | +|---|------|-------|----------|--------|-----------|------------|---------| +| D001 | M001 | arch | Use REST API | REST | Simpler | Yes | human | +`; + + fs.writeFileSync(mdPath, tableContent, 'utf-8'); + + try { + const result = await saveDecisionToDb({ + scope: 'testing', + decision: 'Use Vitest', + choice: 'Vitest', + rationale: 'Fast', + when_context: 'M001', + }, tmpDir); + + // The pre-existing table decision was NOT in DB, so it won't appear after regen. + // But the new decision should be there. 
+ assertEq(result.id, 'D001', 'gets D001 since DB was empty'); + + const afterContent = fs.readFileSync(mdPath, 'utf-8'); + // Table-format file gets fully regenerated — this is the normal path + assertTrue( + afterContent.includes('# Decisions Register'), + 'table-format file still has header after save', + ); + assertTrue( + afterContent.includes('Use Vitest'), + 'new decision present in regenerated table', + ); + } finally { + closeDatabase(); + cleanupDir(tmpDir); + } +} + +console.log('\n── saveDecisionToDb with no existing DECISIONS.md creates table ──'); + +{ + const tmpDir = makeTmpDir(); + const dbPath = path.join(tmpDir, '.gsd', 'gsd.db'); + const mdPath = path.join(tmpDir, '.gsd', 'DECISIONS.md'); + openDatabase(dbPath); + + // No DECISIONS.md exists at all + assertTrue(!fs.existsSync(mdPath), 'DECISIONS.md does not exist initially'); + + try { + const result = await saveDecisionToDb({ + scope: 'arch', + decision: 'Brand new decision', + choice: 'Option A', + rationale: 'Best fit', + }, tmpDir); + + assertEq(result.id, 'D001', 'first decision gets D001'); + assertTrue(fs.existsSync(mdPath), 'DECISIONS.md created'); + + const content = fs.readFileSync(mdPath, 'utf-8'); + assertTrue(content.includes('# Decisions Register'), 'new file has header'); + assertTrue(content.includes('Brand new decision'), 'new file has decision'); + } finally { + closeDatabase(); + cleanupDir(tmpDir); + } +} + +// ═══════════════════════════════════════════════════════════════════════════ + +report(); diff --git a/src/resources/extensions/gsd/tests/git-service.test.ts b/src/resources/extensions/gsd/tests/git-service.test.ts index 540829808..d824606db 100644 --- a/src/resources/extensions/gsd/tests/git-service.test.ts +++ b/src/resources/extensions/gsd/tests/git-service.test.ts @@ -251,8 +251,8 @@ async function main(): Promise { assertEq( RUNTIME_EXCLUSION_PATHS.length, - 9, - "exactly 9 runtime exclusion paths" + 13, + "exactly 13 runtime exclusion paths" ); const expectedPaths = [ @@ -264,6 +264,10 @@ async function main(): Promise { ".gsd/completed-units.json", ".gsd/STATE.md", ".gsd/gsd.db", + ".gsd/gsd.db-shm", + ".gsd/gsd.db-wal", + ".gsd/journal/", + ".gsd/doctor-history.jsonl", ".gsd/DISCUSSION-MANIFEST.json", ]; @@ -1411,16 +1415,14 @@ async function main(): Promise { rmSync(repo, { recursive: true, force: true }); } - // ─── autoCommit: symlinked .gsd stages new milestone artifacts (#2104) ── + // ─── autoCommit: symlinked .gsd does NOT stage milestone artifacts (#2247) ── - console.log("\n=== autoCommit: symlinked .gsd stages new milestone artifacts (#2104) ==="); + console.log("\n=== autoCommit: symlinked .gsd does NOT stage milestone artifacts (#2247) ==="); { - // Reproduction: when .gsd is a symlink (external state project), - // autoCommit silently fails to stage NEW .gsd/milestones/ files because: - // 1. nativeAddAllWithExclusions falls back to plain `git add -A` (symlink) - // 2. `.gsd` is in .gitignore → new .gsd/ files are invisible to `git add` - // The fix: smartStage() force-adds .gsd/milestones/ after the normal staging. + // When .gsd is a symlink (external state project), .gsd/ files live outside + // the repo by design. smartStage() must NOT force-stage them into git — the + // .gitignore exclusion is correct and intentional. 
const repo = initTempRepo(); // Create an external .gsd directory and symlink it into the repo @@ -1433,7 +1435,8 @@ async function main(): Promise { // .gitignore blocks .gsd (as ensureGitignore would do for symlink projects) writeFileSync(join(repo, ".gitignore"), ".gsd\n"); - run("git add .gitignore && git commit -m 'add gitignore'", repo); + run('git add .gitignore', repo); + run('git commit -m "add gitignore"', repo); // Simulate new milestone artifacts created during execution writeFileSync(join(externalGsd, "milestones", "M009", "M009-SUMMARY.md"), "# M009 Summary"); @@ -1449,12 +1452,8 @@ async function main(): Promise { const committed = run("git show --name-only HEAD", repo); assertTrue(committed.includes("src/feature.ts"), "symlink autoCommit: source file committed"); - assertTrue(committed.includes(".gsd/milestones/M009/M009-SUMMARY.md"), - "symlink autoCommit: new M009-SUMMARY.md is committed (not silently dropped)"); - assertTrue(committed.includes(".gsd/milestones/M009/S01-SUMMARY.md"), - "symlink autoCommit: new S01-SUMMARY.md is committed"); - assertTrue(committed.includes(".gsd/milestones/M009/T01-VERIFY.json"), - "symlink autoCommit: new T01-VERIFY.json is committed"); + assertTrue(!committed.includes(".gsd/milestones/"), + "symlink autoCommit: .gsd/milestones/ files are NOT staged (external state stays external)"); try { rmSync(repo, { recursive: true, force: true }); } catch {} try { rmSync(externalGsd, { recursive: true, force: true }); } catch {} diff --git a/src/resources/extensions/gsd/tests/gsd-recover.test.ts b/src/resources/extensions/gsd/tests/gsd-recover.test.ts index f0c1d43c8..0f4df9cb7 100644 --- a/src/resources/extensions/gsd/tests/gsd-recover.test.ts +++ b/src/resources/extensions/gsd/tests/gsd-recover.test.ts @@ -55,6 +55,7 @@ const ROADMAP_M001 = `# M001: Recovery Test - All recovery tests pass - State matches after round-trip + ## Slices - [x] **S01: Setup** \`risk:low\` \`depends:[]\` @@ -312,6 +313,7 @@ async function main() { } } + // ─── Test (b): Idempotent recovery — double recover ──────────────────── console.log('\n=== recover: idempotent — double recovery produces same state ==='); { diff --git a/src/resources/extensions/gsd/tests/journal.test.ts b/src/resources/extensions/gsd/tests/journal.test.ts index 5808b67bb..96a39e064 100644 --- a/src/resources/extensions/gsd/tests/journal.test.ts +++ b/src/resources/extensions/gsd/tests/journal.test.ts @@ -1,4 +1,4 @@ -import test from "node:test"; +import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; import { mkdirSync, @@ -46,9 +46,12 @@ function makeEntry(overrides: Partial = {}): JournalEntry { // ─── emitJournalEvent ───────────────────────────────────────────────────────── -test("emitJournalEvent creates journal directory and JSONL file", () => { - const base = makeTmpBase(); - try { +describe("emitJournalEvent", () => { + let base: string; + beforeEach(() => { base = makeTmpBase(); }); + afterEach(() => { cleanup(base); }); + + test("creates journal directory and JSONL file", () => { const entry = makeEntry(); emitJournalEvent(base, entry); @@ -61,14 +64,9 @@ test("emitJournalEvent creates journal directory and JSONL file", () => { assert.equal(parsed.flowId, entry.flowId); assert.equal(parsed.seq, entry.seq); assert.equal(parsed.eventType, entry.eventType); - } finally { - cleanup(base); - } -}); + }); -test("emitJournalEvent appends multiple lines to the same file", () => { - const base = makeTmpBase(); - try { + test("appends multiple 
lines to the same file", () => { emitJournalEvent(base, makeEntry({ seq: 0 })); emitJournalEvent(base, makeEntry({ seq: 1, eventType: "dispatch-match" })); emitJournalEvent(base, makeEntry({ seq: 2, eventType: "unit-start" })); @@ -82,26 +80,9 @@ test("emitJournalEvent appends multiple lines to the same file", () => { assert.equal(parsed[1].seq, 1); assert.equal(parsed[2].seq, 2); assert.equal(parsed[1].eventType, "dispatch-match"); - } finally { - cleanup(base); - } -}); + }); -test("emitJournalEvent auto-creates nonexistent parent directory", () => { - const base = join(tmpdir(), `gsd-journal-test-${randomUUID()}`); - // Don't create .gsd/ — emitJournalEvent should handle it via mkdirSync recursive - try { - emitJournalEvent(base, makeEntry()); - const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); - assert.ok(existsSync(filePath), "File should exist even when parent dirs did not"); - } finally { - cleanup(base); - } -}); - -test("emitJournalEvent preserves optional fields (rule, causedBy, data)", () => { - const base = makeTmpBase(); - try { + test("preserves optional fields (rule, causedBy, data)", () => { const entry = makeEntry({ rule: "my-dispatch-rule", causedBy: { flowId: "flow-prior", seq: 3 }, @@ -115,9 +96,42 @@ test("emitJournalEvent preserves optional fields (rule, causedBy, data)", () => assert.deepEqual(parsed.causedBy, { flowId: "flow-prior", seq: 3 }); assert.equal(parsed.data.unitId, "M001/S01/T01"); assert.equal(parsed.data.status, "ok"); - } finally { - cleanup(base); - } + }); + + test("silently catches read-only directory errors", () => { + const journalDir = join(base, ".gsd", "journal"); + mkdirSync(journalDir, { recursive: true }); + + // Make the journal directory read-only + chmodSync(journalDir, 0o444); + + // Should not throw + assert.doesNotThrow(() => { + emitJournalEvent(base, makeEntry()); + }); + + // Restore permissions for cleanup + try { + chmodSync(journalDir, 0o755); + } catch { + /* */ + } + }); +}); + +describe("emitJournalEvent — auto-creates parent directory", () => { + let base: string; + beforeEach(() => { + base = join(tmpdir(), `gsd-journal-test-${randomUUID()}`); + // Don't create .gsd/ — emitJournalEvent should handle it via mkdirSync recursive + }); + afterEach(() => { cleanup(base); }); + + test("auto-creates nonexistent parent directory", () => { + emitJournalEvent(base, makeEntry()); + const filePath = join(base, ".gsd", "journal", "2025-03-21.jsonl"); + assert.ok(existsSync(filePath), "File should exist even when parent dirs did not"); + }); }); test("emitJournalEvent silently catches write errors (no throw)", () => { @@ -127,35 +141,14 @@ test("emitJournalEvent silently catches write errors (no throw)", () => { }); }); -test("emitJournalEvent silently catches read-only directory errors", () => { - const base = makeTmpBase(); - const journalDir = join(base, ".gsd", "journal"); - mkdirSync(journalDir, { recursive: true }); - - try { - // Make the journal directory read-only - chmodSync(journalDir, 0o444); - - // Should not throw - assert.doesNotThrow(() => { - emitJournalEvent(base, makeEntry()); - }); - } finally { - // Restore permissions for cleanup - try { - chmodSync(journalDir, 0o755); - } catch { - /* */ - } - cleanup(base); - } -}); - // ─── Daily Rotation ─────────────────────────────────────────────────────────── -test("daily rotation: events with different dates go to different files", () => { - const base = makeTmpBase(); - try { +describe("daily rotation", () => { + let base: string; + beforeEach(() => { base 
= makeTmpBase(); }); + afterEach(() => { cleanup(base); }); + + test("events with different dates go to different files", () => { emitJournalEvent(base, makeEntry({ ts: "2025-03-20T23:59:59.000Z" })); emitJournalEvent(base, makeEntry({ ts: "2025-03-21T00:00:01.000Z" })); emitJournalEvent(base, makeEntry({ ts: "2025-03-22T12:00:00.000Z" })); @@ -172,16 +165,17 @@ test("daily rotation: events with different dates go to different files", () => .split("\n"); assert.equal(lines.length, 1, `${date}.jsonl should have 1 line`); } - } finally { - cleanup(base); - } + }); }); // ─── queryJournal ───────────────────────────────────────────────────────────── -test("queryJournal returns all entries when no filters provided", () => { - const base = makeTmpBase(); - try { +describe("queryJournal", () => { + let base: string; + beforeEach(() => { base = makeTmpBase(); }); + afterEach(() => { cleanup(base); }); + + test("returns all entries when no filters provided", () => { emitJournalEvent(base, makeEntry({ seq: 0 })); emitJournalEvent(base, makeEntry({ seq: 1, eventType: "dispatch-match" })); @@ -189,14 +183,9 @@ test("queryJournal returns all entries when no filters provided", () => { assert.equal(results.length, 2); assert.equal(results[0].seq, 0); assert.equal(results[1].seq, 1); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal filters by flowId", () => { - const base = makeTmpBase(); - try { + test("filters by flowId", () => { emitJournalEvent(base, makeEntry({ flowId: "flow-aaa", seq: 0 })); emitJournalEvent(base, makeEntry({ flowId: "flow-bbb", seq: 1 })); emitJournalEvent(base, makeEntry({ flowId: "flow-aaa", seq: 2 })); @@ -204,14 +193,9 @@ test("queryJournal filters by flowId", () => { const results = queryJournal(base, { flowId: "flow-aaa" }); assert.equal(results.length, 2); assert.ok(results.every(e => e.flowId === "flow-aaa")); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal filters by eventType", () => { - const base = makeTmpBase(); - try { + test("filters by eventType", () => { emitJournalEvent(base, makeEntry({ eventType: "iteration-start", seq: 0 })); emitJournalEvent(base, makeEntry({ eventType: "dispatch-match", seq: 1 })); emitJournalEvent(base, makeEntry({ eventType: "unit-start", seq: 2 })); @@ -220,14 +204,9 @@ test("queryJournal filters by eventType", () => { const results = queryJournal(base, { eventType: "dispatch-match" }); assert.equal(results.length, 2); assert.ok(results.every(e => e.eventType === "dispatch-match")); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal filters by unitId (from data.unitId)", () => { - const base = makeTmpBase(); - try { + test("filters by unitId (from data.unitId)", () => { emitJournalEvent( base, makeEntry({ seq: 0, data: { unitId: "M001/S01/T01" } }), @@ -249,14 +228,9 @@ test("queryJournal filters by unitId (from data.unitId)", () => { e => (e.data as Record)?.unitId === "M001/S01/T01", ), ); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal filters by time range (after/before)", () => { - const base = makeTmpBase(); - try { + test("filters by time range (after/before)", () => { emitJournalEvent(base, makeEntry({ ts: "2025-03-20T08:00:00.000Z", seq: 0 })); emitJournalEvent(base, makeEntry({ ts: "2025-03-21T10:00:00.000Z", seq: 1 })); emitJournalEvent(base, makeEntry({ ts: "2025-03-21T15:00:00.000Z", seq: 2 })); @@ -276,14 +250,9 @@ test("queryJournal filters by time range (after/before)", () => { before: "2025-03-21T23:59:59.000Z", }); assert.equal(rangeResults.length, 2, "2 
entries within 2025-03-21"); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal combines multiple filters", () => { - const base = makeTmpBase(); - try { + test("combines multiple filters", () => { emitJournalEvent( base, makeEntry({ flowId: "flow-aaa", eventType: "unit-start", seq: 0 }), @@ -304,25 +273,9 @@ test("queryJournal combines multiple filters", () => { assert.equal(results.length, 1); assert.equal(results[0].flowId, "flow-aaa"); assert.equal(results[0].eventType, "unit-start"); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal on nonexistent directory returns empty array", () => { - const base = join(tmpdir(), `gsd-journal-test-${randomUUID()}`); - // Don't create anything - try { - const results = queryJournal(base); - assert.deepEqual(results, []); - } finally { - cleanup(base); - } -}); - -test("queryJournal skips malformed JSON lines gracefully", () => { - const base = makeTmpBase(); - try { + test("skips malformed JSON lines gracefully", () => { const journalDir = join(base, ".gsd", "journal"); mkdirSync(journalDir, { recursive: true }); @@ -335,14 +288,9 @@ test("queryJournal skips malformed JSON lines gracefully", () => { assert.equal(results.length, 2, "Should skip the malformed line"); assert.equal(results[0].seq, 0); assert.equal(results[1].seq, 1); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal reads across multiple daily files", () => { - const base = makeTmpBase(); - try { + test("reads across multiple daily files", () => { emitJournalEvent(base, makeEntry({ ts: "2025-03-20T12:00:00.000Z", seq: 0 })); emitJournalEvent(base, makeEntry({ ts: "2025-03-21T12:00:00.000Z", seq: 1 })); emitJournalEvent(base, makeEntry({ ts: "2025-03-22T12:00:00.000Z", seq: 2 })); @@ -353,14 +301,9 @@ test("queryJournal reads across multiple daily files", () => { assert.equal(results[0].ts, "2025-03-20T12:00:00.000Z"); assert.equal(results[1].ts, "2025-03-21T12:00:00.000Z"); assert.equal(results[2].ts, "2025-03-22T12:00:00.000Z"); - } finally { - cleanup(base); - } -}); + }); -test("queryJournal filters by rule", () => { - const base = makeTmpBase(); - try { + test("filters by rule", () => { emitJournalEvent( base, makeEntry({ seq: 0, eventType: "dispatch-match", rule: "dispatch-task" }), @@ -380,7 +323,19 @@ test("queryJournal filters by rule", () => { results.every(e => e.rule === "dispatch-task"), "All results should have rule === 'dispatch-task'", ); - } finally { - cleanup(base); - } + }); +}); + +describe("queryJournal — nonexistent directory", () => { + let base: string; + beforeEach(() => { + base = join(tmpdir(), `gsd-journal-test-${randomUUID()}`); + // Don't create anything + }); + afterEach(() => { cleanup(base); }); + + test("on nonexistent directory returns empty array", () => { + const results = queryJournal(base); + assert.deepEqual(results, []); + }); }); diff --git a/src/resources/extensions/gsd/tests/manifest-status.test.ts b/src/resources/extensions/gsd/tests/manifest-status.test.ts index 3020caa87..646eccec0 100644 --- a/src/resources/extensions/gsd/tests/manifest-status.test.ts +++ b/src/resources/extensions/gsd/tests/manifest-status.test.ts @@ -8,7 +8,7 @@ * Uses temp directories with real .gsd/milestones/M001/ structure. 
*/ -import test from 'node:test'; +import { describe, test, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; import { mkdirSync, writeFileSync, rmSync } from 'node:fs'; import { join } from 'node:path'; @@ -30,12 +30,21 @@ function writeManifest(base: string, content: string): void { // ─── Mixed statuses ────────────────────────────────────────────────────────── -test('getManifestStatus: mixed statuses — categorizes entries correctly', async () => { - const tmp = makeTempDir('manifest-mixed'); - const savedVal = process.env.GSD_TEST_EXISTING_KEY_001; - try { +describe('getManifestStatus: mixed statuses', () => { + let tmp: string; + let savedVal: string | undefined; + beforeEach(() => { + tmp = makeTempDir('manifest-mixed'); + savedVal = process.env.GSD_TEST_EXISTING_KEY_001; process.env.GSD_TEST_EXISTING_KEY_001 = 'some-value'; + }); + afterEach(() => { + delete process.env.GSD_TEST_EXISTING_KEY_001; + if (savedVal !== undefined) process.env.GSD_TEST_EXISTING_KEY_001 = savedVal; + rmSync(tmp, { recursive: true, force: true }); + }); + test('categorizes entries correctly', async () => { writeManifest(tmp, `# Secrets Manifest **Milestone:** M001 @@ -80,18 +89,17 @@ test('getManifestStatus: mixed statuses — categorizes entries correctly', asyn assert.deepStrictEqual(result!.collected, ['COLLECTED_KEY']); assert.deepStrictEqual(result!.skipped, ['SKIPPED_KEY']); assert.deepStrictEqual(result!.existing, ['GSD_TEST_EXISTING_KEY_001']); - } finally { - delete process.env.GSD_TEST_EXISTING_KEY_001; - if (savedVal !== undefined) process.env.GSD_TEST_EXISTING_KEY_001 = savedVal; - rmSync(tmp, { recursive: true, force: true }); - } + }); }); // ─── All pending ───────────────────────────────────────────────────────────── -test('getManifestStatus: all pending — 3 pending entries, none in env', async () => { - const tmp = makeTempDir('manifest-pending'); - try { +describe('getManifestStatus: simple temp dir tests', () => { + let tmp: string; + beforeEach(() => { tmp = makeTempDir('manifest-test'); }); + afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); + + test('all pending — 3 pending entries, none in env', async () => { // Ensure none of these are in process.env delete process.env.PEND_A; delete process.env.PEND_B; @@ -133,16 +141,11 @@ test('getManifestStatus: all pending — 3 pending entries, none in env', async assert.deepStrictEqual(result!.collected, []); assert.deepStrictEqual(result!.skipped, []); assert.deepStrictEqual(result!.existing, []); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -// ─── All collected ─────────────────────────────────────────────────────────── + // ─── All collected ─────────────────────────────────────────────────────────── -test('getManifestStatus: all collected — 2 collected entries, none in env', async () => { - const tmp = makeTempDir('manifest-collected'); - try { + test('all collected — 2 collected entries, none in env', async () => { delete process.env.COLL_X; delete process.env.COLL_Y; @@ -174,64 +177,19 @@ test('getManifestStatus: all collected — 2 collected entries, none in env', as assert.deepStrictEqual(result!.collected, ['COLL_X', 'COLL_Y']); assert.deepStrictEqual(result!.skipped, []); assert.deepStrictEqual(result!.existing, []); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -// ─── Key in env overrides manifest status ──────────────────────────────────── + // ─── Missing manifest ──────────────────────────────────────────────────────── 
-test('getManifestStatus: key in env overrides manifest status — collected key in env goes to existing', async () => { - const tmp = makeTempDir('manifest-override'); - const savedVal = process.env.GSD_TEST_OVERRIDE_KEY; - try { - process.env.GSD_TEST_OVERRIDE_KEY = 'already-here'; - - writeManifest(tmp, `# Secrets Manifest - -**Milestone:** M001 -**Generated:** 2025-06-20T10:00:00Z - -### GSD_TEST_OVERRIDE_KEY - -**Service:** Override -**Status:** collected -**Destination:** dotenv - -1. Was collected but now in env -`); - - const result = await getManifestStatus(tmp, 'M001'); - assert.notStrictEqual(result, null); - assert.deepStrictEqual(result!.pending, []); - assert.deepStrictEqual(result!.collected, []); - assert.deepStrictEqual(result!.skipped, []); - assert.deepStrictEqual(result!.existing, ['GSD_TEST_OVERRIDE_KEY']); - } finally { - delete process.env.GSD_TEST_OVERRIDE_KEY; - if (savedVal !== undefined) process.env.GSD_TEST_OVERRIDE_KEY = savedVal; - rmSync(tmp, { recursive: true, force: true }); - } -}); - -// ─── Missing manifest ──────────────────────────────────────────────────────── - -test('getManifestStatus: missing manifest — returns null', async () => { - const tmp = makeTempDir('manifest-missing'); - try { + test('missing manifest — returns null', async () => { // No .gsd directory at all const result = await getManifestStatus(tmp, 'M001'); assert.strictEqual(result, null); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -// ─── Empty manifest (no entries) ───────────────────────────────────────────── + // ─── Empty manifest (no entries) ───────────────────────────────────────────── -test('getManifestStatus: empty manifest — exists but no H3 sections', async () => { - const tmp = makeTempDir('manifest-empty'); - try { + test('empty manifest — exists but no H3 sections', async () => { writeManifest(tmp, `# Secrets Manifest **Milestone:** M001 @@ -244,16 +202,11 @@ test('getManifestStatus: empty manifest — exists but no H3 sections', async () assert.deepStrictEqual(result!.collected, []); assert.deepStrictEqual(result!.skipped, []); assert.deepStrictEqual(result!.existing, []); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -// ─── Env via .env file (not just process.env) ──────────────────────────────── + // ─── Env via .env file (not just process.env) ──────────────────────────────── -test('getManifestStatus: key in .env file counts as existing', async () => { - const tmp = makeTempDir('manifest-dotenv'); - try { + test('key in .env file counts as existing', async () => { delete process.env.DOTENV_ONLY_KEY; writeManifest(tmp, `# Secrets Manifest @@ -277,7 +230,45 @@ test('getManifestStatus: key in .env file counts as existing', async () => { assert.notStrictEqual(result, null); assert.deepStrictEqual(result!.existing, ['DOTENV_ONLY_KEY']); assert.deepStrictEqual(result!.pending, []); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } + }); +}); + +// ─── Key in env overrides manifest status ──────────────────────────────────── + +describe('getManifestStatus: key in env overrides manifest status', () => { + let tmp: string; + let savedVal: string | undefined; + beforeEach(() => { + tmp = makeTempDir('manifest-override'); + savedVal = process.env.GSD_TEST_OVERRIDE_KEY; + process.env.GSD_TEST_OVERRIDE_KEY = 'already-here'; + }); + afterEach(() => { + delete process.env.GSD_TEST_OVERRIDE_KEY; + if (savedVal !== undefined) process.env.GSD_TEST_OVERRIDE_KEY = savedVal; + rmSync(tmp, { recursive: true, 
force: true }); + }); + + test('collected key in env goes to existing', async () => { + writeManifest(tmp, `# Secrets Manifest + +**Milestone:** M001 +**Generated:** 2025-06-20T10:00:00Z + +### GSD_TEST_OVERRIDE_KEY + +**Service:** Override +**Status:** collected +**Destination:** dotenv + +1. Was collected but now in env +`); + + const result = await getManifestStatus(tmp, 'M001'); + assert.notStrictEqual(result, null); + assert.deepStrictEqual(result!.pending, []); + assert.deepStrictEqual(result!.collected, []); + assert.deepStrictEqual(result!.skipped, []); + assert.deepStrictEqual(result!.existing, ['GSD_TEST_OVERRIDE_KEY']); + }); }); diff --git a/src/resources/extensions/gsd/tests/markdown-renderer.test.ts b/src/resources/extensions/gsd/tests/markdown-renderer.test.ts index f7896d9ac..35551f06d 100644 --- a/src/resources/extensions/gsd/tests/markdown-renderer.test.ts +++ b/src/resources/extensions/gsd/tests/markdown-renderer.test.ts @@ -566,6 +566,7 @@ console.log('\n── markdown-renderer: renderTaskPlanFromDb throws for missing } } + // ═══════════════════════════════════════════════════════════════════════════ // Task Summary Rendering // ═══════════════════════════════════════════════════════════════════════════ diff --git a/src/resources/extensions/gsd/tests/prompt-contracts.test.ts b/src/resources/extensions/gsd/tests/prompt-contracts.test.ts index bb14adfdb..44e86d8fa 100644 --- a/src/resources/extensions/gsd/tests/prompt-contracts.test.ts +++ b/src/resources/extensions/gsd/tests/prompt-contracts.test.ts @@ -155,10 +155,9 @@ test("plan-slice prompt explicitly names gsd_plan_slice and gsd_plan_task as DB- assert.match(prompt, /DB-backed tools are the canonical write path/i); }); -test("plan-slice prompt treats direct file writes as a degraded fallback, not the default", () => { +test("plan-slice prompt does not instruct direct file writes as a primary step", () => { const prompt = readPrompt("plan-slice"); - assert.match(prompt, /degraded path, not the default/i); - // Should not instruct to "Write {{outputPath}}" as a primary step + // Should not instruct to "Write {{outputPath}}" as a primary step — tools handle rendering assert.doesNotMatch(prompt, /^\d+\.\s+Write `?\{\{outputPath\}\}`?\s*$/m); }); @@ -172,23 +171,28 @@ test("replan-slice prompt requires DB-backed planning state when available", () assert.match(prompt, /DB-backed planning tool exists for this phase, use it as the source of truth/i); }); -test("reassess-roadmap prompt forbids roadmap-only manual edits when tool path exists", () => { +test("reassess-roadmap prompt references gsd_reassess_roadmap tool", () => { const prompt = readPrompt("reassess-roadmap"); - assert.match(prompt, /Do \*\*not\*\* bypass state with manual roadmap-only edits/i); + assert.match(prompt, /gsd_reassess_roadmap/); }); // ─── Prompt migration: replan-slice → gsd_replan_slice ──────────────── -test("replan-slice prompt names gsd_replan_slice as canonical tool", () => { +test("replan-slice prompt names gsd_replan_slice as the tool to use", () => { const prompt = readPrompt("replan-slice"); assert.match(prompt, /gsd_replan_slice/); - assert.match(prompt, /canonical write path/i); }); // ─── Prompt migration: reassess-roadmap → gsd_reassess_roadmap ─────── -test("reassess-roadmap prompt names gsd_reassess_roadmap as canonical tool", () => { +test("reassess-roadmap prompt names gsd_reassess_roadmap as the tool to use", () => { const prompt = readPrompt("reassess-roadmap"); assert.match(prompt, /gsd_reassess_roadmap/); - assert.match(prompt, 
/canonical write path/i); +}); + +test("reactive-execute prompt references tool calls instead of checkbox updates", () => { + const prompt = readPrompt("reactive-execute"); + assert.doesNotMatch(prompt, /checkbox updates/); + assert.doesNotMatch(prompt, /checkbox edits/); + assert.match(prompt, /completion tool calls/); }); diff --git a/src/resources/extensions/gsd/tests/rogue-file-detection.test.ts b/src/resources/extensions/gsd/tests/rogue-file-detection.test.ts index ccfbb9359..e0fd6c00e 100644 --- a/src/resources/extensions/gsd/tests/rogue-file-detection.test.ts +++ b/src/resources/extensions/gsd/tests/rogue-file-detection.test.ts @@ -57,6 +57,7 @@ function createSlicePlanOnDisk(basePath: string, mid: string, sid: string): stri return planFile; } + // ── Tests ──────────────────────────────────────────────────────────────────── test("rogue detection: task summary on disk, no DB row → detected as rogue", () => { @@ -170,6 +171,36 @@ test("rogue detection: slice summary on disk, no DB row → detected as rogue", } }); +test("rogue detection: slice summary on disk, DB row with status 'complete' → NOT rogue", () => { + const basePath = createTmpBase(); + const dbPath = join(basePath, ".gsd", "gsd.db"); + mkdirSync(join(basePath, ".gsd"), { recursive: true }); + + try { + openDatabase(dbPath); + + createSliceSummaryOnDisk(basePath, "M001", "S01"); + + // Insert parent milestone first (foreign key constraint) + insertMilestone({ id: "M001" }); + + // Insert a slice row, then update to complete + insertSlice({ + milestoneId: "M001", + id: "S01", + title: "Test Slice", + status: "complete", + }); + updateSliceStatus("M001", "S01", "complete", new Date().toISOString()); + + const rogues = detectRogueFileWrites("complete-slice", "M001/S01", basePath); + assert.equal(rogues.length, 0, "Should NOT detect rogue when slice DB row is complete"); + } finally { + closeDatabase(); + rmSync(basePath, { recursive: true, force: true }); + } +}); + test("rogue detection: plan milestone roadmap on disk, no milestone planning row → detected as rogue", () => { const basePath = createTmpBase(); const dbPath = join(basePath, ".gsd", "gsd.db"); diff --git a/src/resources/extensions/gsd/tests/service-tier.test.ts b/src/resources/extensions/gsd/tests/service-tier.test.ts index ff6d0b684..2192c9aa7 100644 --- a/src/resources/extensions/gsd/tests/service-tier.test.ts +++ b/src/resources/extensions/gsd/tests/service-tier.test.ts @@ -4,8 +4,8 @@ import assert from "node:assert/strict"; import { supportsServiceTier, formatServiceTierStatus, + formatServiceTierFooterStatus, resolveServiceTierIcon, - type ServiceTierSetting, } from "../service-tier.ts"; // ─── supportsServiceTier ───────────────────────────────────────────────────── @@ -27,6 +27,14 @@ describe("supportsServiceTier", () => { assert.equal(supportsServiceTier("openai/gpt-5.4"), true); }); + test("returns true for vibeproxy-openai/gpt-5.4 (proxy provider-prefixed)", () => { + assert.equal(supportsServiceTier("vibeproxy-openai/gpt-5.4"), true); + }); + + test("returns false for provider-only identifier without gpt-5.4 model suffix", () => { + assert.equal(supportsServiceTier("vibeproxy-openai"), false); + }); + test("returns false for claude-opus-4-6", () => { assert.equal(supportsServiceTier("claude-opus-4-6"), false); }); @@ -52,6 +60,11 @@ describe("formatServiceTierStatus", () => { assert.ok(output.includes("disabled"), `Expected 'disabled' in: ${output}`); }); + test("mentions provider-agnostic model gating", () => { + const output = 
formatServiceTierStatus("priority"); + assert.ok(output.includes("regardless of provider"), `Expected provider note in: ${output}`); + }); + test("shows priority when set to priority", () => { const output = formatServiceTierStatus("priority"); assert.ok(output.includes("priority"), `Expected 'priority' in: ${output}`); @@ -63,6 +76,22 @@ describe("formatServiceTierStatus", () => { }); }); +// ─── formatServiceTierFooterStatus ─────────────────────────────────────────── + +describe("formatServiceTierFooterStatus", () => { + test("returns priority footer status for supported model", () => { + assert.equal(formatServiceTierFooterStatus("priority", "vibeproxy-openai/gpt-5.4"), "fast: ⚡ priority"); + }); + + test("returns undefined for unsupported model", () => { + assert.equal(formatServiceTierFooterStatus("priority", "claude-opus-4-6"), undefined); + }); + + test("returns undefined when tier is disabled", () => { + assert.equal(formatServiceTierFooterStatus(undefined, "gpt-5.4"), undefined); + }); +}); + // ─── resolveServiceTierIcon ────────────────────────────────────────────────── describe("resolveServiceTierIcon", () => { diff --git a/src/resources/extensions/gsd/tests/skill-activation.test.ts b/src/resources/extensions/gsd/tests/skill-activation.test.ts index e2c6c7be0..673e8911c 100644 --- a/src/resources/extensions/gsd/tests/skill-activation.test.ts +++ b/src/resources/extensions/gsd/tests/skill-activation.test.ts @@ -39,7 +39,7 @@ function buildBlock( }); } -test("buildSkillActivationBlock matches installed skills from task context", () => { +test("buildSkillActivationBlock does not auto-activate skills via broad context heuristic", () => { const base = makeTempBase(); try { writeSkill(base, "react", "Use for React components, hooks, JSX, and frontend UI work."); @@ -52,7 +52,29 @@ test("buildSkillActivationBlock matches installed skills from task context", () taskTitle: "Implement React settings panel", }); - assert.match(result, //); + // Skills should not be activated just because their name appears in task context. + // Activation requires explicit preference sources (always_use, skill_rules, prefer_skills, skills_used). 
+ assert.equal(result, ""); + } finally { + cleanup(base); + } +}); + +test("buildSkillActivationBlock activates skills via prefer_skills when context matches", () => { + const base = makeTempBase(); + try { + writeSkill(base, "react", "Use for React components, hooks, JSX, and frontend UI work."); + writeSkill(base, "swiftui", "Use for SwiftUI views, iOS layout, and Apple platform UI work."); + loadOnlyTestSkills(base); + + const result = buildBlock(base, { + sliceTitle: "Build React dashboard", + taskId: "T01", + taskTitle: "Implement React settings panel", + }, { + prefer_skills: ["react"], + }); + assert.match(result, /Call Skill\('react'\)/); assert.doesNotMatch(result, /swiftui/); } finally { @@ -105,7 +127,7 @@ test("buildSkillActivationBlock includes skill_rules matches and task-plan skill } }); -test("buildSkillActivationBlock honors avoid_skills", () => { +test("buildSkillActivationBlock honors avoid_skills against always_use_skills", () => { const base = makeTempBase(); try { writeSkill(base, "react", "Use for React components and frontend UI work."); @@ -114,6 +136,7 @@ test("buildSkillActivationBlock honors avoid_skills", () => { const result = buildBlock(base, { taskTitle: "Implement React settings panel", }, { + always_use_skills: ["react"], avoid_skills: ["react"], }); @@ -138,3 +161,33 @@ test("buildSkillActivationBlock falls back cleanly when nothing matches", () => cleanup(base); } }); + +test("buildSkillActivationBlock does not activate skills from extraContext or taskPlanContent body", () => { + const base = makeTempBase(); + try { + writeSkill(base, "xcode-build", "Use for Xcode build workflows and iOS compilation."); + writeSkill(base, "ableton-lom", "Use for Ableton Live Object Model scripting."); + writeSkill(base, "frontend-design", "Use for frontend design systems and UI components."); + loadOnlyTestSkills(base); + + const taskPlan = [ + "---", + "skills_used: []", + "---", + "# T01: Build the API endpoint", + "Use xcode-build patterns and frontend-design tokens.", + ].join("\n"); + + const result = buildBlock(base, { + taskTitle: "Build REST API", + extraContext: ["Build workflow for iOS and Ableton integration testing"], + taskPlanContent: taskPlan, + }); + + // None of these skills should activate — extraContext and taskPlanContent body + // must not be used for heuristic matching. + assert.equal(result, ""); + } finally { + cleanup(base); + } +}); diff --git a/src/resources/extensions/gsd/tests/symlink-numbered-variants.test.ts b/src/resources/extensions/gsd/tests/symlink-numbered-variants.test.ts new file mode 100644 index 000000000..ed14dfb47 --- /dev/null +++ b/src/resources/extensions/gsd/tests/symlink-numbered-variants.test.ts @@ -0,0 +1,151 @@ +/** + * Tests for macOS numbered symlink variant cleanup (#2205). + * + * macOS can rename `.gsd` to `.gsd 2`, `.gsd 3`, etc. when a directory + * already exists at the target path. ensureGsdSymlink() must detect and + * remove these numbered variants so the real `.gsd` symlink is always + * the one in use. 
+ */ + +import { + mkdtempSync, + rmSync, + writeFileSync, + existsSync, + lstatSync, + realpathSync, + mkdirSync, + symlinkSync, + readlinkSync, +} from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; +import { execSync } from "node:child_process"; + +import { ensureGsdSymlink, externalGsdRoot } from "../repo-identity.ts"; +import { createTestContext } from "./test-helpers.ts"; + +const { assertEq, assertTrue, report } = createTestContext(); + +function run(command: string, cwd: string): string { + return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim(); +} + +async function main(): Promise { + const base = realpathSync(mkdtempSync(join(tmpdir(), "gsd-symlink-variants-"))); + const stateDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-state-variants-"))); + + try { + process.env.GSD_STATE_DIR = stateDir; + + // Set up a minimal git repo + run("git init -b main", base); + run('git config user.name "Pi Test"', base); + run('git config user.email "pi@example.com"', base); + run('git remote add origin git@github.com:example/repo.git', base); + writeFileSync(join(base, "README.md"), "# Test Repo\n", "utf-8"); + run("git add README.md", base); + run('git commit -m "chore: init"', base); + + const externalPath = externalGsdRoot(base); + + // ── Test: numbered variant directories are cleaned up ────────────── + console.log("\n=== ensureGsdSymlink removes numbered .gsd variants (#2205) ==="); + { + // Simulate macOS creating numbered variants: ".gsd 2", ".gsd 3" + mkdirSync(join(base, ".gsd 2"), { recursive: true }); + mkdirSync(join(base, ".gsd 3"), { recursive: true }); + mkdirSync(join(base, ".gsd 4"), { recursive: true }); + + const result = ensureGsdSymlink(base); + assertEq(result, externalPath, "ensureGsdSymlink returns external path"); + assertTrue(existsSync(join(base, ".gsd")), ".gsd exists after ensureGsdSymlink"); + assertTrue(lstatSync(join(base, ".gsd")).isSymbolicLink(), ".gsd is a symlink"); + + // The numbered variants must have been removed + assertTrue(!existsSync(join(base, ".gsd 2")), '".gsd 2" directory was cleaned up'); + assertTrue(!existsSync(join(base, ".gsd 3")), '".gsd 3" directory was cleaned up'); + assertTrue(!existsSync(join(base, ".gsd 4")), '".gsd 4" directory was cleaned up'); + } + + // ── Test: numbered variant symlinks are cleaned up ───────────────── + console.log("\n=== ensureGsdSymlink removes numbered symlink variants ==="); + { + // Clean slate + rmSync(join(base, ".gsd"), { recursive: true, force: true }); + + // Simulate: ".gsd 2" is a symlink to the correct target (the real .gsd) + // and ".gsd" doesn't exist — this is the actual macOS scenario + const staleTarget = join(stateDir, "projects", "stale-target"); + mkdirSync(staleTarget, { recursive: true }); + symlinkSync(externalPath, join(base, ".gsd 2"), "junction"); + symlinkSync(staleTarget, join(base, ".gsd 3"), "junction"); + + const result = ensureGsdSymlink(base); + assertEq(result, externalPath, "ensureGsdSymlink returns external path when variants exist"); + assertTrue(existsSync(join(base, ".gsd")), ".gsd exists"); + assertTrue(lstatSync(join(base, ".gsd")).isSymbolicLink(), ".gsd is a symlink"); + + assertTrue(!existsSync(join(base, ".gsd 2")), '".gsd 2" symlink variant was cleaned up'); + assertTrue(!existsSync(join(base, ".gsd 3")), '".gsd 3" symlink variant was cleaned up'); + } + + // ── Test: real .gsd directory blocks symlink, but variants still cleaned ── + console.log("\n=== ensureGsdSymlink cleans variants even 
when .gsd is a real directory ==="); + { + // Clean slate + rmSync(join(base, ".gsd"), { recursive: true, force: true }); + + // .gsd is a real directory (git-tracked) and numbered variants exist + mkdirSync(join(base, ".gsd", "milestones"), { recursive: true }); + writeFileSync(join(base, ".gsd", "milestones", "M001.md"), "# M001\n", "utf-8"); + mkdirSync(join(base, ".gsd 2"), { recursive: true }); + mkdirSync(join(base, ".gsd 3"), { recursive: true }); + + const result = ensureGsdSymlink(base); + // When .gsd is a real directory, ensureGsdSymlink preserves it + assertEq(result, join(base, ".gsd"), "real .gsd directory preserved"); + assertTrue(lstatSync(join(base, ".gsd")).isDirectory(), ".gsd remains a directory"); + + // But the numbered variants should still be cleaned up + assertTrue(!existsSync(join(base, ".gsd 2")), '".gsd 2" cleaned even when .gsd is a directory'); + assertTrue(!existsSync(join(base, ".gsd 3")), '".gsd 3" cleaned even when .gsd is a directory'); + } + + // ── Test: only numeric-suffixed variants are removed ─────────────── + console.log("\n=== ensureGsdSymlink only removes .gsd + space + digit variants ==="); + { + rmSync(join(base, ".gsd"), { recursive: true, force: true }); + + // These should NOT be touched + mkdirSync(join(base, ".gsd-backup"), { recursive: true }); + mkdirSync(join(base, ".gsd_old"), { recursive: true }); + + // These SHOULD be removed (macOS collision pattern) + mkdirSync(join(base, ".gsd 2"), { recursive: true }); + mkdirSync(join(base, ".gsd 10"), { recursive: true }); + + ensureGsdSymlink(base); + + assertTrue(existsSync(join(base, ".gsd-backup")), ".gsd-backup is NOT removed"); + assertTrue(existsSync(join(base, ".gsd_old")), ".gsd_old is NOT removed"); + assertTrue(!existsSync(join(base, ".gsd 2")), '".gsd 2" removed'); + assertTrue(!existsSync(join(base, ".gsd 10")), '".gsd 10" removed'); + + // Cleanup non-variant dirs + rmSync(join(base, ".gsd-backup"), { recursive: true, force: true }); + rmSync(join(base, ".gsd_old"), { recursive: true, force: true }); + } + + } finally { + delete process.env.GSD_STATE_DIR; + try { rmSync(base, { recursive: true, force: true }); } catch { /* ignore */ } + try { rmSync(stateDir, { recursive: true, force: true }); } catch { /* ignore */ } + report(); + } +} + +main().catch((error) => { + console.error(error); + process.exit(1); +}); diff --git a/src/resources/extensions/gsd/tests/token-cost-display.test.ts b/src/resources/extensions/gsd/tests/token-cost-display.test.ts new file mode 100644 index 000000000..e12d9e4db --- /dev/null +++ b/src/resources/extensions/gsd/tests/token-cost-display.test.ts @@ -0,0 +1,118 @@ +/** + * Tests for the show_token_cost preference (#1515). + * + * Covers: + * - Preference recognition and validation + * - Cost formatting accuracy (inline re-implementation for test isolation) + * - Disabled-by-default behavior + * - Preference parsing from markdown frontmatter + */ + +import test from "node:test"; +import assert from "node:assert/strict"; +import { + validatePreferences, + parsePreferencesMarkdown, +} from "../preferences.ts"; +import { KNOWN_PREFERENCE_KEYS } from "../preferences-types.ts"; + +// Re-implement formatPromptCost here for test isolation (avoids pi-coding-agent build dep). +// The canonical implementation lives in footer.ts. 
+function formatPromptCost(cost: number): string { + if (cost < 0.001) return `$${cost.toFixed(4)}`; + if (cost < 0.01) return `$${cost.toFixed(3)}`; + if (cost < 1) return `$${cost.toFixed(3)}`; + return `$${cost.toFixed(2)}`; +} + +// ── Preference recognition ────────────────────────────────────────────────── + +test("show_token_cost is a known preference key", () => { + assert.ok(KNOWN_PREFERENCE_KEYS.has("show_token_cost")); +}); + +test("show_token_cost: true validates without errors", () => { + const { errors, preferences } = validatePreferences({ show_token_cost: true }); + assert.equal(errors.length, 0); + assert.equal(preferences.show_token_cost, true); +}); + +test("show_token_cost: false validates without errors", () => { + const { errors, preferences } = validatePreferences({ show_token_cost: false }); + assert.equal(errors.length, 0); + assert.equal(preferences.show_token_cost, false); +}); + +test("show_token_cost: non-boolean produces validation error", () => { + const { errors } = validatePreferences({ show_token_cost: "yes" as any }); + assert.ok(errors.length > 0); + assert.ok(errors[0].includes("show_token_cost")); + assert.ok(errors[0].includes("boolean")); +}); + +test("show_token_cost does not produce unknown-key warning", () => { + const { warnings } = validatePreferences({ show_token_cost: true }); + const unknownWarnings = warnings.filter(w => w.includes("show_token_cost")); + assert.equal(unknownWarnings.length, 0); +}); + +// ── Disabled by default ───────────────────────────────────────────────────── + +test("show_token_cost defaults to undefined (disabled) when not set", () => { + const { preferences } = validatePreferences({}); + assert.equal(preferences.show_token_cost, undefined); +}); + +test("empty preferences.md does not enable show_token_cost", () => { + const prefs = parsePreferencesMarkdown("---\nversion: 1\n---\n"); + assert.ok(prefs); + assert.equal(prefs.show_token_cost, undefined); +}); + +test("preferences.md with show_token_cost: true enables the preference", () => { + const prefs = parsePreferencesMarkdown("---\nshow_token_cost: true\n---\n"); + assert.ok(prefs); + assert.equal(prefs.show_token_cost, true); +}); + +// ── Cost formatting ───────────────────────────────────────────────────────── + +test("formatPromptCost formats sub-cent amounts with 4 decimals", () => { + assert.equal(formatPromptCost(0.0003), "$0.0003"); + assert.equal(formatPromptCost(0.0009), "$0.0009"); +}); + +test("formatPromptCost formats cent-range amounts with 3 decimals", () => { + assert.equal(formatPromptCost(0.003), "$0.003"); + assert.equal(formatPromptCost(0.012), "$0.012"); + assert.equal(formatPromptCost(0.1), "$0.100"); +}); + +test("formatPromptCost formats dollar-range amounts with 2 decimals", () => { + assert.equal(formatPromptCost(1.5), "$1.50"); + assert.equal(formatPromptCost(12.345), "$12.35"); +}); + +test("formatPromptCost handles zero", () => { + assert.equal(formatPromptCost(0), "$0.0000"); +}); + +// ── Cost calculation correctness ──────────────────────────────────────────── + +test("cost calculation formula matches Model cost structure", () => { + // Simulates: usage.input * model.cost.input / 1_000_000 + usage.output * model.cost.output / 1_000_000 + // Model.cost fields are $/million tokens + const modelCost = { input: 15, output: 75, cacheRead: 1.5, cacheWrite: 18.75 }; // claude-opus-4 pricing + const usage = { input: 2000, output: 500, cacheRead: 10000, cacheWrite: 1000 }; + + const cost = + (usage.input * modelCost.input / 1_000_000) + + 
(usage.output * modelCost.output / 1_000_000) + + (usage.cacheRead * modelCost.cacheRead / 1_000_000) + + (usage.cacheWrite * modelCost.cacheWrite / 1_000_000); + + // 2000*15/1M + 500*75/1M + 10000*1.5/1M + 1000*18.75/1M + // = 0.03 + 0.0375 + 0.015 + 0.01875 = 0.10125 + assert.ok(Math.abs(cost - 0.10125) < 0.0001, `Expected ~$0.10125 but got $${cost}`); + assert.equal(formatPromptCost(cost), "$0.101"); +}); diff --git a/src/resources/extensions/gsd/tests/verification-gate.test.ts b/src/resources/extensions/gsd/tests/verification-gate.test.ts index 05a96fcd5..c87f07a6b 100644 --- a/src/resources/extensions/gsd/tests/verification-gate.test.ts +++ b/src/resources/extensions/gsd/tests/verification-gate.test.ts @@ -15,7 +15,7 @@ * 11. Dependency audit — git diff detection, npm audit parsing, graceful failures */ -import test from "node:test"; +import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; import { mkdirSync, writeFileSync, rmSync } from "node:fs"; import { join, dirname } from "node:path"; @@ -37,37 +37,30 @@ function makeTempDir(prefix: string): string { // ─── Discovery Tests ───────────────────────────────────────────────────────── -test("verification-gate: discoverCommands from preference commands", () => { - const tmp = makeTempDir("vg-pref"); - try { +describe("verification-gate: discovery", () => { + let tmp: string; + beforeEach(() => { tmp = makeTempDir("vg-discovery"); }); + afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); + + test("discoverCommands from preference commands", () => { const result = discoverCommands({ preferenceCommands: ["npm run lint", "npm run test"], cwd: tmp, }); assert.deepStrictEqual(result.commands, ["npm run lint", "npm run test"]); assert.equal(result.source, "preference"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: discoverCommands from task plan verify field", () => { - const tmp = makeTempDir("vg-taskplan"); - try { + test("discoverCommands from task plan verify field", () => { const result = discoverCommands({ taskPlanVerify: "npm run lint && npm run test", cwd: tmp, }); assert.deepStrictEqual(result.commands, ["npm run lint", "npm run test"]); assert.equal(result.source, "task-plan"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: discoverCommands from package.json scripts", () => { - const tmp = makeTempDir("vg-pkg"); - try { + test("discoverCommands from package.json scripts", () => { writeFileSync( join(tmp, "package.json"), JSON.stringify({ @@ -86,14 +79,9 @@ test("verification-gate: discoverCommands from package.json scripts", () => { "npm run test", ]); assert.equal(result.source, "package-json"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: first-non-empty-wins — preference beats task plan and package.json", () => { - const tmp = makeTempDir("vg-precedence"); - try { + test("first-non-empty-wins — preference beats task plan and package.json", () => { writeFileSync( join(tmp, "package.json"), JSON.stringify({ scripts: { lint: "eslint ." 
} }), @@ -105,14 +93,9 @@ test("verification-gate: first-non-empty-wins — preference beats task plan and }); assert.deepStrictEqual(result.commands, ["custom-check"]); assert.equal(result.source, "preference"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: task plan verify beats package.json", () => { - const tmp = makeTempDir("vg-tp-beats-pkg"); - try { + test("task plan verify beats package.json", () => { writeFileSync( join(tmp, "package.json"), JSON.stringify({ scripts: { lint: "eslint ." } }), @@ -123,25 +106,15 @@ test("verification-gate: task plan verify beats package.json", () => { }); assert.deepStrictEqual(result.commands, ["custom-verify"]); assert.equal(result.source, "task-plan"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: missing package.json → 0 checks, source none", () => { - const tmp = makeTempDir("vg-no-pkg"); - try { + test("missing package.json → 0 checks, source none", () => { const result = discoverCommands({ cwd: tmp }); assert.deepStrictEqual(result.commands, []); assert.equal(result.source, "none"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: package.json with no matching scripts → 0 checks", () => { - const tmp = makeTempDir("vg-no-scripts"); - try { + test("package.json with no matching scripts → 0 checks", () => { writeFileSync( join(tmp, "package.json"), JSON.stringify({ scripts: { build: "tsc", start: "node index.js" } }), @@ -149,14 +122,9 @@ test("verification-gate: package.json with no matching scripts → 0 checks", () const result = discoverCommands({ cwd: tmp }); assert.deepStrictEqual(result.commands, []); assert.equal(result.source, "none"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: empty preference array falls through to task plan", () => { - const tmp = makeTempDir("vg-empty-pref"); - try { + test("empty preference array falls through to task plan", () => { const result = discoverCommands({ preferenceCommands: [], taskPlanVerify: "echo ok", @@ -164,16 +132,99 @@ test("verification-gate: empty preference array falls through to task plan", () }); assert.deepStrictEqual(result.commands, ["echo ok"]); assert.equal(result.source, "task-plan"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } + }); + + test("package.json with only test script → returns only npm run test", () => { + writeFileSync( + join(tmp, "package.json"), + JSON.stringify({ + scripts: { + test: "vitest", + build: "tsc", + start: "node index.js", + }, + }), + ); + const result = discoverCommands({ cwd: tmp }); + assert.deepStrictEqual(result.commands, ["npm run test"]); + assert.equal(result.source, "package-json"); + }); + + test("taskPlanVerify with single command (no &&)", () => { + const result = discoverCommands({ + taskPlanVerify: "npm test", + cwd: tmp, + }); + assert.deepStrictEqual(result.commands, ["npm test"]); + assert.equal(result.source, "task-plan"); + }); + + test("whitespace-only preference commands fall through", () => { + writeFileSync( + join(tmp, "package.json"), + JSON.stringify({ scripts: { lint: "eslint ." 
} }), + ); + const result = discoverCommands({ + preferenceCommands: [" ", ""], + cwd: tmp, + }); + // Whitespace-only strings are trimmed to empty and filtered out + assert.equal(result.source, "package-json"); + assert.deepStrictEqual(result.commands, ["npm run lint"]); + }); + + test("prose taskPlanVerify is rejected, falls through to package.json", () => { + writeFileSync( + join(tmp, "package.json"), + JSON.stringify({ scripts: { test: "vitest" } }), + ); + const result = discoverCommands({ + taskPlanVerify: "Document exists, contains all 5 scale names, all 14 semantic tokens", + cwd: tmp, + }); + // Prose should be rejected, so it falls through to package.json + assert.equal(result.source, "package-json"); + assert.deepStrictEqual(result.commands, ["npm run test"]); + }); + + test("prose taskPlanVerify with no package.json → source none", () => { + const result = discoverCommands({ + taskPlanVerify: "Verify the output matches expected format and all fields are present", + cwd: tmp, + }); + assert.equal(result.source, "none"); + assert.deepStrictEqual(result.commands, []); + }); + + test("valid command in taskPlanVerify still works", () => { + const result = discoverCommands({ + taskPlanVerify: "npm run lint && npm run test", + cwd: tmp, + }); + assert.equal(result.source, "task-plan"); + assert.deepStrictEqual(result.commands, ["npm run lint", "npm run test"]); + }); + + test("mixed prose and commands in taskPlanVerify — only commands kept", () => { + const result = discoverCommands({ + taskPlanVerify: "Check that everything works && npm run test", + cwd: tmp, + }); + // "Check that everything works" is prose (starts with capital, 4+ words) + // "npm run test" is a valid command + assert.equal(result.source, "task-plan"); + assert.deepStrictEqual(result.commands, ["npm run test"]); + }); }); // ─── Execution Tests ───────────────────────────────────────────────────────── -test("verification-gate: all commands pass → gate passes", () => { - const tmp = makeTempDir("vg-pass"); - try { +describe("verification-gate: execution", () => { + let tmp: string; + beforeEach(() => { tmp = makeTempDir("vg-exec"); }); + afterEach(() => { rmSync(tmp, { recursive: true, force: true }); }); + + test("all commands pass → gate passes", () => { const result = runVerificationGate({ basePath: tmp, unitId: "T01", @@ -188,14 +239,9 @@ test("verification-gate: all commands pass → gate passes", () => { assert.ok(result.checks[0].stdout.includes("hello")); assert.ok(result.checks[1].stdout.includes("world")); assert.equal(typeof result.timestamp, "number"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: one command fails → gate fails with exit code + stderr", () => { - const tmp = makeTempDir("vg-fail"); - try { + test("one command fails → gate fails with exit code + stderr", () => { const result = runVerificationGate({ basePath: tmp, unitId: "T01", @@ -207,14 +253,9 @@ test("verification-gate: one command fails → gate fails with exit code + stder assert.equal(result.checks[0].exitCode, 0); assert.equal(result.checks[1].exitCode, 1); assert.ok(result.checks[1].stderr.includes("err")); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: no commands discovered → gate passes with 0 checks", () => { - const tmp = makeTempDir("vg-empty"); - try { + test("no commands discovered → gate passes with 0 checks", () => { const result = runVerificationGate({ basePath: tmp, unitId: "T01", @@ -223,14 +264,9 @@ 
test("verification-gate: no commands discovered → gate passes with 0 checks", assert.equal(result.passed, true); assert.equal(result.checks.length, 0); assert.equal(result.discoverySource, "none"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: command not found → exit code 127", () => { - const tmp = makeTempDir("vg-notfound"); - try { + test("command not found → exit code 127", () => { const result = runVerificationGate({ basePath: tmp, unitId: "T01", @@ -241,14 +277,9 @@ test("verification-gate: command not found → exit code 127", () => { assert.equal(result.checks.length, 1); assert.ok(result.checks[0].exitCode !== 0, "should have non-zero exit code"); assert.ok(result.checks[0].durationMs >= 0); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: no DEP0190 deprecation warning when running commands", () => { - const tmp = makeTempDir("vg-dep0190"); - try { + test("no DEP0190 deprecation warning when running commands", () => { // Run a subprocess with --throw-deprecation so any DeprecationWarning // becomes a thrown error (non-zero exit). The fix passes the command // string to sh -c explicitly instead of using spawnSync(cmd, {shell:true}). @@ -282,14 +313,9 @@ test("verification-gate: no DEP0190 deprecation warning when running commands", 0, `Expected exit 0 (no deprecation) but got ${child.status}. stderr: ${child.stderr}`, ); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); + }); -test("verification-gate: each check has durationMs", () => { - const tmp = makeTempDir("vg-duration"); - try { + test("each check has durationMs", () => { const result = runVerificationGate({ basePath: tmp, unitId: "T01", @@ -299,9 +325,42 @@ test("verification-gate: each check has durationMs", () => { assert.equal(result.checks.length, 1); assert.equal(typeof result.checks[0].durationMs, "number"); assert.ok(result.checks[0].durationMs >= 0); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } + }); + + test("one command fails — remaining commands still run (non-short-circuit)", () => { + // First fails, second and third should still execute + const result = runVerificationGate({ + basePath: tmp, + unitId: "T02", + cwd: tmp, + preferenceCommands: [ + "sh -c 'exit 1'", + "echo second", + "echo third", + ], + }); + assert.equal(result.passed, false); + assert.equal(result.checks.length, 3, "all 3 commands should run"); + assert.equal(result.checks[0].exitCode, 1, "first command fails"); + assert.equal(result.checks[1].exitCode, 0, "second command runs and passes"); + assert.ok(result.checks[1].stdout.includes("second")); + assert.equal(result.checks[2].exitCode, 0, "third command runs and passes"); + assert.ok(result.checks[2].stdout.includes("third")); + }); + + test("gate execution uses cwd for spawnSync", () => { + // pwd should report the temp dir + const result = runVerificationGate({ + basePath: tmp, + unitId: "T02", + cwd: tmp, + preferenceCommands: ["pwd"], + }); + assert.equal(result.passed, true); + assert.equal(result.checks.length, 1); + // The stdout should contain the tmp dir path (resolving symlinks) + assert.ok(result.checks[0].stdout.trim().length > 0, "pwd should produce output"); + }); }); // ─── Preference Validation Tests ───────────────────────────────────────────── @@ -361,62 +420,6 @@ test("verification-gate: validatePreferences floors verification_max_retries", ( assert.equal(result.errors.length, 0); }); -// ─── Additional Discovery 
Tests (T02) ─────────────────────────────────────── - -test("verification-gate: package.json with only test script → returns only npm run test", () => { - const tmp = makeTempDir("vg-only-test"); - try { - writeFileSync( - join(tmp, "package.json"), - JSON.stringify({ - scripts: { - test: "vitest", - build: "tsc", - start: "node index.js", - }, - }), - ); - const result = discoverCommands({ cwd: tmp }); - assert.deepStrictEqual(result.commands, ["npm run test"]); - assert.equal(result.source, "package-json"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: taskPlanVerify with single command (no &&)", () => { - const tmp = makeTempDir("vg-tp-single"); - try { - const result = discoverCommands({ - taskPlanVerify: "npm test", - cwd: tmp, - }); - assert.deepStrictEqual(result.commands, ["npm test"]); - assert.equal(result.source, "task-plan"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: whitespace-only preference commands fall through", () => { - const tmp = makeTempDir("vg-ws-pref"); - try { - writeFileSync( - join(tmp, "package.json"), - JSON.stringify({ scripts: { lint: "eslint ." } }), - ); - const result = discoverCommands({ - preferenceCommands: [" ", ""], - cwd: tmp, - }); - // Whitespace-only strings are trimmed to empty and filtered out - assert.equal(result.source, "package-json"); - assert.deepStrictEqual(result.commands, ["npm run lint"]); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - // ─── isLikelyCommand Tests (issue #1066) ──────────────────────────────────── test("isLikelyCommand: known command prefixes are accepted", () => { @@ -468,116 +471,6 @@ test("isLikelyCommand: short lowercase tokens without flags are accepted (could assert.equal(isLikelyCommand("mycheck"), true); }); -test("verification-gate: prose taskPlanVerify is rejected, falls through to package.json", () => { - const tmp = makeTempDir("vg-prose-reject"); - try { - writeFileSync( - join(tmp, "package.json"), - JSON.stringify({ scripts: { test: "vitest" } }), - ); - const result = discoverCommands({ - taskPlanVerify: "Document exists, contains all 5 scale names, all 14 semantic tokens", - cwd: tmp, - }); - // Prose should be rejected, so it falls through to package.json - assert.equal(result.source, "package-json"); - assert.deepStrictEqual(result.commands, ["npm run test"]); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: prose taskPlanVerify with no package.json → source none", () => { - const tmp = makeTempDir("vg-prose-none"); - try { - const result = discoverCommands({ - taskPlanVerify: "Verify the output matches expected format and all fields are present", - cwd: tmp, - }); - assert.equal(result.source, "none"); - assert.deepStrictEqual(result.commands, []); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: valid command in taskPlanVerify still works", () => { - const tmp = makeTempDir("vg-valid-cmd"); - try { - const result = discoverCommands({ - taskPlanVerify: "npm run lint && npm run test", - cwd: tmp, - }); - assert.equal(result.source, "task-plan"); - assert.deepStrictEqual(result.commands, ["npm run lint", "npm run test"]); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: mixed prose and commands in taskPlanVerify — only commands kept", () => { - const tmp = makeTempDir("vg-mixed"); - try { - const result = 
discoverCommands({ - taskPlanVerify: "Check that everything works && npm run test", - cwd: tmp, - }); - // "Check that everything works" is prose (starts with capital, 4+ words) - // "npm run test" is a valid command - assert.equal(result.source, "task-plan"); - assert.deepStrictEqual(result.commands, ["npm run test"]); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -// ─── Additional Execution Tests (T02) ─────────────────────────────────────── - -test("verification-gate: one command fails — remaining commands still run (non-short-circuit)", () => { - const tmp = makeTempDir("vg-no-short-circuit"); - try { - // First fails, second and third should still execute - const result = runVerificationGate({ - basePath: tmp, - unitId: "T02", - cwd: tmp, - preferenceCommands: [ - "sh -c 'exit 1'", - "echo second", - "echo third", - ], - }); - assert.equal(result.passed, false); - assert.equal(result.checks.length, 3, "all 3 commands should run"); - assert.equal(result.checks[0].exitCode, 1, "first command fails"); - assert.equal(result.checks[1].exitCode, 0, "second command runs and passes"); - assert.ok(result.checks[1].stdout.includes("second")); - assert.equal(result.checks[2].exitCode, 0, "third command runs and passes"); - assert.ok(result.checks[2].stdout.includes("third")); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - -test("verification-gate: gate execution uses cwd for spawnSync", () => { - const tmp = makeTempDir("vg-cwd"); - try { - // pwd should report the temp dir - const result = runVerificationGate({ - basePath: tmp, - unitId: "T02", - cwd: tmp, - preferenceCommands: ["pwd"], - }); - assert.equal(result.passed, true); - assert.equal(result.checks.length, 1); - // The stdout should contain the tmp dir path (resolving symlinks) - assert.ok(result.checks[0].stdout.trim().length > 0, "pwd should produce output"); - } finally { - rmSync(tmp, { recursive: true, force: true }); - } -}); - // ─── Additional Preference Validation Tests (T02) ────────────────────────── test("verification-gate: verification_commands produces no unknown-key warnings", () => { diff --git a/src/resources/extensions/gsd/tests/worktree-health-dispatch.test.ts b/src/resources/extensions/gsd/tests/worktree-health-dispatch.test.ts index de29eef1a..6c2ed26f7 100644 --- a/src/resources/extensions/gsd/tests/worktree-health-dispatch.test.ts +++ b/src/resources/extensions/gsd/tests/worktree-health-dispatch.test.ts @@ -7,7 +7,7 @@ * rather than hard-coding package.json / src/ only. 
*/ -import test from "node:test"; +import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from "node:fs"; import { join } from "node:path"; @@ -73,113 +73,70 @@ test("PROJECT_FILES is exported and contains expected multi-ecosystem entries", assert.ok(PROJECT_FILES.includes("Package.swift"), "includes Swift marker"); }); -test("health check passes for Rust project (Cargo.toml, no package.json)", () => { - const dir = createGitRepo(); - try { +describe("health check with git repo", () => { + let dir: string; + beforeEach(() => { dir = createGitRepo(); }); + afterEach(() => { rmSync(dir, { recursive: true, force: true }); }); + + test("health check passes for Rust project (Cargo.toml, no package.json)", () => { writeFileSync(join(dir, "Cargo.toml"), "[package]\nname = \"test\"\n"); mkdirSync(join(dir, "crates"), { recursive: true }); assert.ok(wouldPassHealthCheck(dir, existsSync), "Rust project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for Go project (go.mod, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for Go project (go.mod, no package.json)", () => { writeFileSync(join(dir, "go.mod"), "module example.com/test\n\ngo 1.21\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "Go project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for Python project (pyproject.toml, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for Python project (pyproject.toml, no package.json)", () => { writeFileSync(join(dir, "pyproject.toml"), "[project]\nname = \"test\"\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "Python project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for Java project (pom.xml, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for Java project (pom.xml, no package.json)", () => { writeFileSync(join(dir, "pom.xml"), "\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "Java project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for Swift project (Package.swift, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for Swift project (Package.swift, no package.json)", () => { writeFileSync(join(dir, "Package.swift"), "// swift-tools-version:5.7\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "Swift project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for C/C++ project (CMakeLists.txt, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for C/C++ project (CMakeLists.txt, no package.json)", () => { writeFileSync(join(dir, "CMakeLists.txt"), "cmake_minimum_required(VERSION 3.20)\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "C/C++ project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for Elixir project (mix.exs, no package.json)", () => { - const dir = createGitRepo(); - try { + test("health check passes for Elixir project 
(mix.exs, no package.json)", () => { writeFileSync(join(dir, "mix.exs"), "defmodule Test.MixProject do\nend\n"); assert.ok(wouldPassHealthCheck(dir, existsSync), "Elixir project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for JS project (package.json, backward compat)", () => { - const dir = createGitRepo(); - try { + test("health check passes for JS project (package.json, backward compat)", () => { writeFileSync(join(dir, "package.json"), '{"name":"test"}\n'); assert.ok(wouldPassHealthCheck(dir, existsSync), "JS project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check passes for src/-only project (backward compat)", () => { - const dir = createGitRepo(); - try { + test("health check passes for src/-only project (backward compat)", () => { mkdirSync(join(dir, "src"), { recursive: true }); assert.ok(wouldPassHealthCheck(dir, existsSync), "src/-only project should pass health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); + }); -test("health check fails for directory with no .git", () => { - const dir = mkdtempSync(join(tmpdir(), "wt-dispatch-test-nogit-")); - try { - writeFileSync(join(dir, "Cargo.toml"), "[package]\nname = \"test\"\n"); - assert.ok(!wouldPassHealthCheck(dir, existsSync), "no-git directory should fail health check"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } -}); - -test("health check passes for empty git repo (greenfield project)", () => { - const dir = createGitRepo(); - try { + test("health check passes for empty git repo (greenfield project)", () => { assert.ok(wouldPassHealthCheck(dir, existsSync), "empty git repo should pass health check (greenfield)"); assert.ok(!hasRecognizedProjectFiles(dir, existsSync), "empty git repo has no recognized project files"); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + }); +}); + +describe("health check without git repo", () => { + let dir: string; + beforeEach(() => { dir = mkdtempSync(join(tmpdir(), "wt-dispatch-test-nogit-")); }); + afterEach(() => { rmSync(dir, { recursive: true, force: true }); }); + + test("health check fails for directory with no .git", () => { + writeFileSync(join(dir, "Cargo.toml"), "[package]\nname = \"test\"\n"); + assert.ok(!wouldPassHealthCheck(dir, existsSync), "no-git directory should fail health check"); + }); }); diff --git a/src/resources/extensions/gsd/tests/worktree-manager.test.ts b/src/resources/extensions/gsd/tests/worktree-manager.test.ts index 9b836ad30..68b038d81 100644 --- a/src/resources/extensions/gsd/tests/worktree-manager.test.ts +++ b/src/resources/extensions/gsd/tests/worktree-manager.test.ts @@ -1,4 +1,4 @@ -import test from "node:test"; +import { describe, test, beforeEach, afterEach } from "node:test"; import assert from "node:assert/strict"; import { mkdtempSync, mkdirSync, rmSync, writeFileSync, existsSync } from "node:fs"; import { join } from "node:path"; @@ -73,9 +73,12 @@ test("worktreeBranchName formats branch name", () => { // ─── createWorktree ─────────────────────────────────────────────────────────── -test("createWorktree creates worktree with correct metadata", () => { - const base = makeBaseRepo(); - try { +describe("createWorktree", () => { + let base: string; + beforeEach(() => { base = makeBaseRepo(); }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("creates worktree with correct 
metadata", () => { const info = createWorktree(base, "feature-x"); assert.strictEqual(info.name, "feature-x", "name should match"); assert.strictEqual(info.branch, "worktree/feature-x", "branch should be prefixed"); @@ -88,33 +91,9 @@ test("createWorktree creates worktree with correct metadata", () => { ); const branches = run("git branch", base); assert.ok(branches.includes("worktree/feature-x"), "branch should be created in base repo"); - } finally { - rmSync(base, { recursive: true, force: true }); - } -}); + }); -test("createWorktree rejects duplicate name", () => { - const { base } = makeRepoWithWorktree("feature-x"); - try { - assert.throws( - () => createWorktree(base, "feature-x"), - (err: Error) => { - assert.ok( - err.message.includes("already exists"), - `expected "already exists" in error, got: ${err.message}`, - ); - return true; - }, - "should throw on duplicate worktree name", - ); - } finally { - rmSync(base, { recursive: true, force: true }); - } -}); - -test("createWorktree rejects invalid name", () => { - const base = makeBaseRepo(); - try { + test("rejects invalid name", () => { assert.throws( () => createWorktree(base, "bad name!"), (err: Error) => { @@ -126,42 +105,68 @@ test("createWorktree rejects invalid name", () => { }, "should throw on invalid worktree name", ); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); +}); + +describe("createWorktree — duplicate rejection", () => { + let base: string; + beforeEach(() => { + const repo = makeRepoWithWorktree("feature-x"); + base = repo.base; + }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("rejects duplicate name", () => { + assert.throws( + () => createWorktree(base, "feature-x"), + (err: Error) => { + assert.ok( + err.message.includes("already exists"), + `expected "already exists" in error, got: ${err.message}`, + ); + return true; + }, + "should throw on duplicate worktree name", + ); + }); }); // ─── listWorktrees ──────────────────────────────────────────────────────────── -test("listWorktrees returns active worktrees", () => { - const { base } = makeRepoWithWorktree("feature-x"); - try { +describe("listWorktrees", () => { + let base: string; + beforeEach(() => { + const repo = makeRepoWithWorktree("feature-x"); + base = repo.base; + }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("returns active worktrees", () => { const list = listWorktrees(base); assert.strictEqual(list.length, 1, "should list exactly one worktree"); assert.strictEqual(list[0]!.name, "feature-x", "name should match"); assert.strictEqual(list[0]!.branch, "worktree/feature-x", "branch should match"); assert.ok(list[0]!.exists, "exists flag should be true"); - } finally { - rmSync(base, { recursive: true, force: true }); - } -}); + }); -test("listWorktrees returns empty after removal", () => { - const { base } = makeRepoWithWorktree("feature-x"); - try { + test("returns empty after removal", () => { removeWorktree(base, "feature-x"); const list = listWorktrees(base); assert.strictEqual(list.length, 0, "should have no worktrees after removal"); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); }); // ─── diffWorktreeGSD ───────────────────────────────────────────────────────── -test("diffWorktreeGSD detects added and modified GSD files", () => { - const { base } = makeRepoWithChanges("feature-x"); - try { +describe("diffWorktreeGSD and getWorktreeGSDDiff", () => { + let base: string; + beforeEach(() => { + const repo = 
makeRepoWithChanges("feature-x"); + base = repo.base; + }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("detects added and modified GSD files", () => { const diff = diffWorktreeGSD(base, "feature-x"); assert.ok(diff.added.length > 0, "should have added files"); assert.ok( @@ -174,58 +179,60 @@ test("diffWorktreeGSD detects added and modified GSD files", () => { "M001 roadmap should be in modified files", ); assert.strictEqual(diff.removed.length, 0, "should have no removed files"); - } finally { - rmSync(base, { recursive: true, force: true }); - } -}); + }); -// ─── getWorktreeGSDDiff ─────────────────────────────────────────────────────── - -test("getWorktreeGSDDiff returns patch content", () => { - const { base } = makeRepoWithChanges("feature-x"); - try { + test("returns patch content", () => { const fullDiff = getWorktreeGSDDiff(base, "feature-x"); assert.ok(fullDiff.includes("M002"), "diff should mention M002"); assert.ok(fullDiff.includes("updated"), "diff should mention the update"); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); }); // ─── getWorktreeLog ─────────────────────────────────────────────────────────── -test("getWorktreeLog shows commits", () => { - const { base } = makeRepoWithChanges("feature-x"); - try { +describe("getWorktreeLog", () => { + let base: string; + beforeEach(() => { + const repo = makeRepoWithChanges("feature-x"); + base = repo.base; + }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("shows commits", () => { const log = getWorktreeLog(base, "feature-x"); assert.ok(log.includes("add M002"), "log should include the commit message"); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); }); // ─── removeWorktree ─────────────────────────────────────────────────────────── -test("removeWorktree removes directory and branch", () => { - const { base, wtPath } = makeRepoWithWorktree("feature-x"); - try { +describe("removeWorktree", () => { + let base: string; + let wtPath: string; + beforeEach(() => { + const repo = makeRepoWithWorktree("feature-x"); + base = repo.base; + wtPath = repo.wtPath; + }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("removes directory and branch", () => { removeWorktree(base, "feature-x", { deleteBranch: true }); assert.ok(!existsSync(wtPath), "worktree directory should be gone"); const branches = run("git branch", base); assert.ok(!branches.includes("worktree/feature-x"), "branch should be deleted"); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); }); -test("removeWorktree on missing worktree does not throw", () => { - const base = makeBaseRepo(); - try { +describe("removeWorktree — missing worktree", () => { + let base: string; + beforeEach(() => { base = makeBaseRepo(); }); + afterEach(() => { rmSync(base, { recursive: true, force: true }); }); + + test("on missing worktree does not throw", () => { assert.doesNotThrow( () => removeWorktree(base, "nonexistent"), "should not throw when worktree does not exist", ); - } finally { - rmSync(base, { recursive: true, force: true }); - } + }); }); diff --git a/src/resources/extensions/gsd/tests/worktree-resolver.test.ts b/src/resources/extensions/gsd/tests/worktree-resolver.test.ts index 2c4330dfe..11718a263 100644 --- a/src/resources/extensions/gsd/tests/worktree-resolver.test.ts +++ b/src/resources/extensions/gsd/tests/worktree-resolver.test.ts @@ -139,11 +139,10 @@ function makeDeps( 
captureIntegrationBranch: ( basePath: string, mid: string | undefined, - opts?: { commitDocs?: boolean }, ) => { calls.push({ fn: "captureIntegrationBranch", - args: [basePath, mid, opts], + args: [basePath, mid], }); }, ...overrides, diff --git a/src/resources/extensions/gsd/worktree-resolver.ts b/src/resources/extensions/gsd/worktree-resolver.ts index 4a7723eee..dceb4ed26 100644 --- a/src/resources/extensions/gsd/worktree-resolver.ts +++ b/src/resources/extensions/gsd/worktree-resolver.ts @@ -63,7 +63,6 @@ export interface WorktreeResolverDeps { captureIntegrationBranch: ( basePath: string, mid: string, - opts?: { commitDocs?: boolean }, ) => void; } @@ -410,10 +409,10 @@ export class WorktreeResolver { }); // Surface a clear, actionable error. The worktree and milestone branch are // intentionally preserved — nothing has been deleted. The user can retry - // /complete-milestone or merge manually once the underlying issue is fixed + // /gsd dispatch complete-milestone or merge manually once the underlying issue is fixed // (e.g. checkout to wrong branch, unresolved conflicts). (#1668) ctx.notify( - `Milestone merge failed: ${msg}. Your worktree and milestone branch are preserved — retry /complete-milestone or merge manually.`, + `Milestone merge failed: ${msg}. Your worktree and milestone branch are preserved — retry /gsd dispatch complete-milestone or merge manually.`, "warning", ); diff --git a/src/resources/extensions/gsd/worktree.ts b/src/resources/extensions/gsd/worktree.ts index 6d089f92d..84d3dd6d2 100644 --- a/src/resources/extensions/gsd/worktree.ts +++ b/src/resources/extensions/gsd/worktree.ts @@ -57,13 +57,13 @@ export function setActiveMilestoneId(basePath: string, milestoneId: string | nul * record when the user starts from a different branch (#300). Always a no-op * if on a GSD slice branch. */ -export function captureIntegrationBranch(basePath: string, milestoneId: string, options?: { commitDocs?: boolean }): void { +export function captureIntegrationBranch(basePath: string, milestoneId: string): void { // In a worktree, the base branch is implicit (worktree/). // Writing it to META.json would leave stale metadata after merge back to main. if (detectWorktreeName(basePath)) return; const svc = getService(basePath); const current = svc.getCurrentBranch(); - writeIntegrationBranch(basePath, milestoneId, current, options); + writeIntegrationBranch(basePath, milestoneId, current); } // ─── Pure Utility Functions (unchanged) ──────────────────────────────────── diff --git a/src/resources/extensions/mcp-client/index.ts b/src/resources/extensions/mcp-client/index.ts index 904fbbcb4..2113540ff 100644 --- a/src/resources/extensions/mcp-client/index.ts +++ b/src/resources/extensions/mcp-client/index.ts @@ -149,7 +149,11 @@ async function getOrConnect(name: string, signal?: AbortSignal): Promise stderr: "pipe", }); } else if (config.transport === "http" && config.url) { - transport = new StreamableHTTPClientTransport(new URL(config.url)); + const resolvedUrl = config.url.replace( + /\$\{([^}]+)\}/g, + (_, name) => process.env[name] ?? 
"", + ); + transport = new StreamableHTTPClientTransport(new URL(resolvedUrl)); } else { throw new Error(`Server "${name}" has unsupported transport: ${config.transport}`); } diff --git a/src/resources/extensions/search-the-web/tool-search.ts b/src/resources/extensions/search-the-web/tool-search.ts index 54dab89b0..399a399df 100644 --- a/src/resources/extensions/search-the-web/tool-search.ts +++ b/src/resources/extensions/search-the-web/tool-search.ts @@ -398,16 +398,16 @@ export function registerSearchTool(pi: ExtensionAPI) { // with brief interruptions every MAX_CONSECUTIVE_DUPES+1 calls. if (cacheKey === lastSearchKey) { consecutiveDupeCount++; - if (consecutiveDupeCount >= MAX_CONSECUTIVE_DUPES) { + if (consecutiveDupeCount > MAX_CONSECUTIVE_DUPES) { return { - content: [{ type: "text" as const, text: `⚠️ Search loop detected: the query "${params.query}" has been searched ${consecutiveDupeCount + 1} times consecutively with identical results. The information you need is already in the previous search results above. Stop searching and use those results to proceed with your task.` }], + content: [{ type: "text" as const, text: `⚠️ Search loop detected: the query "${params.query}" has been searched ${consecutiveDupeCount} times consecutively with identical results. The information you need is already in the previous search results above. Stop searching and use those results to proceed with your task.` }], isError: true, details: { errorKind: "search_loop", error: "Consecutive duplicate search detected" } satisfies Partial, }; } } else { lastSearchKey = cacheKey; - consecutiveDupeCount = 0; + consecutiveDupeCount = 1; } const cached = searchCache.get(cacheKey); diff --git a/src/tests/search-loop-guard.test.ts b/src/tests/search-loop-guard.test.ts index 266b5155a..6413bef32 100644 --- a/src/tests/search-loop-guard.test.ts +++ b/src/tests/search-loop-guard.test.ts @@ -14,6 +14,23 @@ import assert from "node:assert/strict"; import { registerSearchTool } from "../resources/extensions/search-the-web/tool-search.ts"; import searchExtension from "../resources/extensions/search-the-web/index.ts"; +const ORIGINAL_ENV = { + BRAVE_API_KEY: process.env.BRAVE_API_KEY, + TAVILY_API_KEY: process.env.TAVILY_API_KEY, + OLLAMA_API_KEY: process.env.OLLAMA_API_KEY, +}; + +function restoreSearchEnv() { + if (ORIGINAL_ENV.BRAVE_API_KEY === undefined) delete process.env.BRAVE_API_KEY; + else process.env.BRAVE_API_KEY = ORIGINAL_ENV.BRAVE_API_KEY; + + if (ORIGINAL_ENV.TAVILY_API_KEY === undefined) delete process.env.TAVILY_API_KEY; + else process.env.TAVILY_API_KEY = ORIGINAL_ENV.TAVILY_API_KEY; + + if (ORIGINAL_ENV.OLLAMA_API_KEY === undefined) delete process.env.OLLAMA_API_KEY; + else process.env.OLLAMA_API_KEY = ORIGINAL_ENV.OLLAMA_API_KEY; +} + // ============================================================================= // Mock helpers // ============================================================================= @@ -101,6 +118,8 @@ async function callSearch( test("search loop guard fires after MAX_CONSECUTIVE_DUPES duplicates", async () => { process.env.BRAVE_API_KEY = "test-key-loop-guard"; + delete process.env.TAVILY_API_KEY; + delete process.env.OLLAMA_API_KEY; const restoreFetch = mockFetch(makeBraveResponse()); try { @@ -127,12 +146,14 @@ test("search loop guard fires after MAX_CONSECUTIVE_DUPES duplicates", async () ); } finally { restoreFetch(); - delete process.env.BRAVE_API_KEY; + restoreSearchEnv(); } }); test("search loop guard resets at session_start boundary", async () => { 
process.env.BRAVE_API_KEY = "test-key-loop-guard-session"; + delete process.env.TAVILY_API_KEY; + delete process.env.OLLAMA_API_KEY; const restoreFetch = mockFetch(makeBraveResponse()); const query = "session boundary query"; @@ -167,12 +188,14 @@ test("search loop guard resets at session_start boundary", async () => { ); } finally { restoreFetch(); - delete process.env.BRAVE_API_KEY; + restoreSearchEnv(); } }); test("search loop guard stays armed after firing — subsequent duplicates immediately re-trigger (#1671)", async () => { process.env.BRAVE_API_KEY = "test-key-loop-guard-2"; + delete process.env.TAVILY_API_KEY; + delete process.env.OLLAMA_API_KEY; const restoreFetch = mockFetch(makeBraveResponse()); // Use a unique query so module-level state from previous test doesn't interfere @@ -209,12 +232,14 @@ test("search loop guard stays armed after firing — subsequent duplicates immed ); } finally { restoreFetch(); - delete process.env.BRAVE_API_KEY; + restoreSearchEnv(); } }); test("search loop guard resets cleanly when a different query is issued", async () => { process.env.BRAVE_API_KEY = "test-key-loop-guard-3"; + delete process.env.TAVILY_API_KEY; + delete process.env.OLLAMA_API_KEY; const restoreFetch = mockFetch(makeBraveResponse()); const queryA = "query alpha reset test"; @@ -239,6 +264,6 @@ test("search loop guard resets cleanly when a different query is issued", async ); } finally { restoreFetch(); - delete process.env.BRAVE_API_KEY; + restoreSearchEnv(); } }); diff --git a/src/tests/startup-perf.test.ts b/src/tests/startup-perf.test.ts new file mode 100644 index 000000000..cd97cc59a --- /dev/null +++ b/src/tests/startup-perf.test.ts @@ -0,0 +1,160 @@ +import { describe, it, beforeEach, afterEach } from "node:test"; +import assert from "node:assert/strict"; +import * as fs from "node:fs"; +import * as os from "node:os"; +import * as path from "node:path"; + +// ─── Pre-compiled extension loading ────────────────────────────────────────── + +describe("pre-compiled extension loading", () => { + let tmpDir: string; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "precompiled-ext-")); + }); + + afterEach(() => { + try { + fs.rmSync(tmpDir, { recursive: true, force: true, maxRetries: 3 }); + } catch { + // Ignore cleanup errors on Windows + } + }); + + it("prefers .js sibling over .ts when .js is newer", async () => { + // Create a .ts file + const tsPath = path.join(tmpDir, "ext.ts"); + fs.writeFileSync(tsPath, `export default function ext() { return "ts"; }`); + + // Create a .js file with a newer mtime + const jsPath = path.join(tmpDir, "ext.js"); + fs.writeFileSync(jsPath, `export default function ext() { return "js"; }`); + + // Make .js newer than .ts + const now = new Date(); + const past = new Date(now.getTime() - 10_000); + fs.utimesSync(tsPath, past, past); + fs.utimesSync(jsPath, now, now); + + const tsStat = fs.statSync(tsPath); + const jsStat = fs.statSync(jsPath); + assert.ok(jsStat.mtimeMs >= tsStat.mtimeMs, ".js should have matching or newer mtime"); + }); + + it("falls back to .ts when no .js sibling exists", () => { + const tsPath = path.join(tmpDir, "ext.ts"); + fs.writeFileSync(tsPath, `export default function ext() { return "ts"; }`); + + const jsPath = path.join(tmpDir, "ext.js"); + assert.ok(!fs.existsSync(jsPath), ".js should not exist"); + }); + + it("falls back to .ts when .js is older", () => { + const tsPath = path.join(tmpDir, "ext.ts"); + fs.writeFileSync(tsPath, `export default function ext() { return "ts"; }`); + + const jsPath 
= path.join(tmpDir, "ext.js"); + fs.writeFileSync(jsPath, `export default function ext() { return "js-stale"; }`); + + // Make .ts newer + const now = new Date(); + const past = new Date(now.getTime() - 10_000); + fs.utimesSync(jsPath, past, past); + fs.utimesSync(tsPath, now, now); + + const tsStat = fs.statSync(tsPath); + const jsStat = fs.statSync(jsPath); + assert.ok(jsStat.mtimeMs < tsStat.mtimeMs, ".js should be older than .ts"); + }); +}); + +// ─── Batch directory discovery ─────────────────────────────────────────────── + +describe("batch directory discovery", () => { + let tmpDir: string; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "batch-discover-")); + }); + + afterEach(() => { + try { + fs.rmSync(tmpDir, { recursive: true, force: true, maxRetries: 3 }); + } catch { + // Ignore cleanup errors on Windows + } + }); + + it("single readdir discovers existing subdirectories", () => { + // Create some resource subdirectories + fs.mkdirSync(path.join(tmpDir, "extensions")); + fs.mkdirSync(path.join(tmpDir, "skills")); + // prompts and themes do NOT exist + + const entries = fs.readdirSync(tmpDir, { withFileTypes: true }); + const subdirs = new Set( + entries.filter((e) => e.isDirectory()).map((e) => e.name), + ); + + assert.ok(subdirs.has("extensions")); + assert.ok(subdirs.has("skills")); + assert.ok(!subdirs.has("prompts")); + assert.ok(!subdirs.has("themes")); + }); + + it("returns empty set for non-existent parent directory", () => { + const missing = path.join(tmpDir, "does-not-exist"); + let subdirs = new Set(); + try { + const entries = fs.readdirSync(missing, { withFileTypes: true }); + subdirs = new Set( + entries.filter((e) => e.isDirectory()).map((e) => e.name), + ); + } catch { + subdirs = new Set(); + } + + assert.equal(subdirs.size, 0); + }); +}); + +// ─── Node.js compile cache ────────────────────────────────────────────────── + +describe("Node.js compile cache env setup", () => { + it("NODE_COMPILE_CACHE is settable on Node 22+", () => { + const nodeVersion = parseInt(process.versions.node); + if (nodeVersion >= 22) { + // Verify the env var mechanism works (does not throw) + const original = process.env.NODE_COMPILE_CACHE; + try { + process.env.NODE_COMPILE_CACHE = path.join(os.tmpdir(), ".test-compile-cache"); + assert.equal( + process.env.NODE_COMPILE_CACHE, + path.join(os.tmpdir(), ".test-compile-cache"), + ); + } finally { + if (original === undefined) { + delete process.env.NODE_COMPILE_CACHE; + } else { + process.env.NODE_COMPILE_CACHE = original; + } + } + } + }); + + it("does not overwrite existing NODE_COMPILE_CACHE", () => { + const original = process.env.NODE_COMPILE_CACHE; + try { + process.env.NODE_COMPILE_CACHE = "/custom/cache"; + // Simulate the ??= behavior from cli.ts + process.env.NODE_COMPILE_CACHE ??= "/should-not-overwrite"; + assert.equal(process.env.NODE_COMPILE_CACHE, "/custom/cache"); + } finally { + if (original === undefined) { + delete process.env.NODE_COMPILE_CACHE; + } else { + process.env.NODE_COMPILE_CACHE = original; + } + } + }); +}); diff --git a/src/tests/web-boot-node24.test.ts b/src/tests/web-boot-node24.test.ts index f103070cf..dd587aefa 100644 --- a/src/tests/web-boot-node24.test.ts +++ b/src/tests/web-boot-node24.test.ts @@ -151,3 +151,26 @@ test("boot route returns { error } JSON on handler failure", async () => { "boot route must return status 500 on error", ) }) + +// --------------------------------------------------------------------------- +// Bug 4 — bridge-service must import readdirSync 
for session listing (#1936) +// --------------------------------------------------------------------------- + +test("bridge-service imports readdirSync from node:fs (#1936)", async () => { + // The boot payload calls listProjectSessions which uses readdirSync. + // A missing import causes ReferenceError → HTTP 500 on /api/boot. + const { readFileSync } = await import("node:fs") + const { join } = await import("node:path") + + const bridgeSource = readFileSync( + join(process.cwd(), "src", "web", "bridge-service.ts"), + "utf-8", + ) + + assert.match( + bridgeSource, + /import\s*\{[^}]*readdirSync[^}]*\}\s*from\s*["']node:fs["']/, + "bridge-service.ts must import readdirSync from node:fs — " + + "removing it breaks /api/boot with ReferenceError (see #1936)", + ) +}) diff --git a/src/tests/web-bridge-contract.test.ts b/src/tests/web-bridge-contract.test.ts index 1f29ad4ab..cf85c2d85 100644 --- a/src/tests/web-bridge-contract.test.ts +++ b/src/tests/web-bridge-contract.test.ts @@ -659,3 +659,77 @@ test("bridge command/runtime failures are inspectable and redact secret material fixture.cleanup(); } }); + +// --------------------------------------------------------------------------- +// Bug — readdirSync must be available in bridge-service for session listing +// (Fixes #1936: /api/boot returns 500 when readdirSync is missing) +// --------------------------------------------------------------------------- + +test("/api/boot lists sessions from the real filesystem via readdirSync (#1936)", async () => { + const fixture = makeWorkspaceFixture(); + const sessionPath = createSessionFile(fixture.projectCwd, fixture.sessionsDir, "sess-fs", "FS Session"); + const harness = createHarness((command, current) => { + if (command.type === "get_state") { + current.emit({ + id: command.id, + type: "response", + command: "get_state", + success: true, + data: { + sessionId: "sess-fs", + sessionFile: sessionPath, + thinkingLevel: "off", + isStreaming: false, + isCompacting: false, + steeringMode: "all", + followUpMode: "all", + autoCompactionEnabled: false, + autoRetryEnabled: false, + retryInProgress: false, + retryAttempt: 0, + messageCount: 0, + pendingMessageCount: 0, + }, + }); + return; + } + assert.fail(`unexpected command during boot: ${command.type}`); + }); + + // Deliberately omit listSessions so the real listProjectSessions (which + // calls readdirSync) is exercised. If readdirSync is missing from the + // bridge-service node:fs import, this test will throw ReferenceError. 
+ bridge.configureBridgeServiceForTests({ + env: { + ...process.env, + GSD_WEB_PROJECT_CWD: fixture.projectCwd, + GSD_WEB_PROJECT_SESSIONS_DIR: fixture.sessionsDir, + GSD_WEB_PACKAGE_ROOT: repoRoot, + }, + spawn: harness.spawn, + indexWorkspace: async () => fakeWorkspaceIndex(), + getAutoDashboardData: () => fakeAutoDashboardData(), + getOnboardingNeeded: () => false, + }); + + try { + const response = await bootRoute.GET(); + assert.equal(response.status, 200, "/api/boot must not return 500 — readdirSync must be available"); + const payload = await response.json() as any; + + // The real listProjectSessions should have found the session file via readdirSync + assert.ok( + Array.isArray(payload.resumableSessions), + "boot payload must include resumableSessions array", + ); + assert.equal( + payload.resumableSessions.length, + 1, + "readdirSync-based session listing must find the test session file", + ); + assert.equal(payload.resumableSessions[0].id, "sess-fs"); + } finally { + await bridge.resetBridgeServiceForTests(); + fixture.cleanup(); + } +}); diff --git a/src/tests/web-onboarding-contract.test.ts b/src/tests/web-onboarding-contract.test.ts index 5d0be31af..d757d9f6a 100644 --- a/src/tests/web-onboarding-contract.test.ts +++ b/src/tests/web-onboarding-contract.test.ts @@ -15,6 +15,59 @@ const onboardingRoute = await import("../../web/app/api/onboarding/route.ts"); const commandRoute = await import("../../web/app/api/session/command/route.ts"); const { AuthStorage } = await import("@gsd/pi-coding-agent"); +const ONBOARDING_ENV_KEYS = [ + "GITHUB_TOKEN", + "GH_TOKEN", + "COPILOT_GITHUB_TOKEN", + "ANTHROPIC_OAUTH_TOKEN", + "ANTHROPIC_API_KEY", + "OPENAI_API_KEY", + "AZURE_OPENAI_API_KEY", + "GEMINI_API_KEY", + "GOOGLE_APPLICATION_CREDENTIALS", + "GOOGLE_CLOUD_PROJECT", + "GCLOUD_PROJECT", + "GOOGLE_CLOUD_LOCATION", + "GROQ_API_KEY", + "CEREBRAS_API_KEY", + "XAI_API_KEY", + "OPENROUTER_API_KEY", + "AI_GATEWAY_API_KEY", + "ZAI_API_KEY", + "MISTRAL_API_KEY", + "MINIMAX_API_KEY", + "MINIMAX_CN_API_KEY", + "HF_TOKEN", + "OPENCODE_API_KEY", + "KIMI_API_KEY", + "ALIBABA_API_KEY", + "AWS_PROFILE", + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_BEARER_TOKEN_BEDROCK", + "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI", + "AWS_CONTAINER_CREDENTIALS_FULL_URI", + "AWS_WEB_IDENTITY_TOKEN_FILE", +] as const; + +const ORIGINAL_ONBOARDING_ENV = Object.fromEntries( + ONBOARDING_ENV_KEYS.map((key) => [key, process.env[key]]), +) as Record<(typeof ONBOARDING_ENV_KEYS)[number], string | undefined>; + +function clearOnboardingEnv(): void { + for (const key of ONBOARDING_ENV_KEYS) { + delete process.env[key]; + } +} + +function restoreOnboardingEnv(): void { + for (const key of ONBOARDING_ENV_KEYS) { + const value = ORIGINAL_ONBOARDING_ENV[key]; + if (value === undefined) delete process.env[key]; + else process.env[key] = value; + } +} + class FakeRpcChild extends EventEmitter { stdin = new PassThrough(); stdout = new PassThrough(); @@ -52,6 +105,16 @@ function attachJsonLineReader(stream: PassThrough, onLine: (line: string) => voi }); } +function noEnvApiKey(): null { + return null; +} + +function projectRequest(projectCwd: string, url: string, init?: RequestInit): Request { + const base = new URL(url, "http://localhost"); + base.searchParams.set("project", projectCwd); + return new Request(base, init); +} + function makeWorkspaceFixture(): { projectCwd: string; sessionsDir: string; cleanup: () => void } { const root = mkdtempSync(join(tmpdir(), "gsd-web-onboarding-")); const projectCwd = join(root, 
"project"); @@ -229,7 +292,6 @@ function configureBridgeFixture(fixture: { projectCwd: string; sessionsDir: stri bridge.configureBridgeServiceForTests({ env: { - ...process.env, GSD_WEB_PROJECT_CWD: fixture.projectCwd, GSD_WEB_PROJECT_SESSIONS_DIR: fixture.sessionsDir, GSD_WEB_PACKAGE_ROOT: repoRoot, @@ -244,12 +306,13 @@ function configureBridgeFixture(fixture: { projectCwd: string; sessionsDir: stri test("boot and onboarding routes expose locked required state plus explicitly skippable optional setup when auth is missing", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); configureBridgeFixture(fixture, "sess-missing-auth"); - onboarding.configureOnboardingServiceForTests({ authStorage }); + onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey }); try { - const bootResponse = await bootRoute.GET(); + const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); assert.equal(bootResponse.status, 200); const bootPayload = (await bootResponse.json()) as any; @@ -281,7 +344,7 @@ test("boot and onboarding routes expose locked required state plus explicitly sk assert.equal(anthropicProvider.supports.apiKey, true); assert.equal(anthropicProvider.supports.oauthAvailable, true); - const onboardingResponse = await onboardingRoute.GET(); + const onboardingResponse = await onboardingRoute.GET(projectRequest(fixture.projectCwd, "/api/onboarding")); assert.equal(onboardingResponse.status, 200); const onboardingPayload = (await onboardingResponse.json()) as any; assert.equal(onboardingPayload.onboarding.locked, true); @@ -289,20 +352,25 @@ test("boot and onboarding routes expose locked required state plus explicitly sk } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("runtime env-backed auth unlocks boot onboarding state and reports the environment source", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); const previousGithubToken = process.env.GITHUB_TOKEN; process.env.GITHUB_TOKEN = "ghu_runtime_env_token"; configureBridgeFixture(fixture, "sess-env-auth"); - onboarding.configureOnboardingServiceForTests({ authStorage }); + onboarding.configureOnboardingServiceForTests({ + authStorage, + getEnvApiKey: (provider: string) => (provider === "github-copilot" ? 
process.env.GITHUB_TOKEN : undefined), + }); try { - const bootResponse = await bootRoute.GET(); + const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); assert.equal(bootResponse.status, 200); const bootPayload = (await bootResponse.json()) as any; @@ -325,16 +393,19 @@ test("runtime env-backed auth unlocks boot onboarding state and reports the envi } onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("failed API-key validation stays locked, redacts the error, and is reflected in boot state without persisting auth", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); configureBridgeFixture(fixture, "sess-validation-failure"); onboarding.configureOnboardingServiceForTests({ authStorage, + getEnvApiKey: noEnvApiKey, validateApiKey: async () => ({ ok: false, message: "OpenAI rejected sk-test-secret-123456 because Bearer sk-test-secret-123456 is invalid", @@ -343,7 +414,7 @@ test("failed API-key validation stays locked, redacts the error, and is reflecte try { const validationResponse = await onboardingRoute.POST( - new Request("http://localhost/api/onboarding", { + projectRequest(fixture.projectCwd, "/api/onboarding", { method: "POST", body: JSON.stringify({ action: "save_api_key", @@ -366,7 +437,7 @@ test("failed API-key validation stays locked, redacts the error, and is reflecte assert.doesNotMatch(validationPayload.onboarding.lastValidation.message, /sk-test-secret-123456/); assert.equal(authStorage.hasAuth("openai"), false); - const bootResponse = await bootRoute.GET(); + const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); assert.equal(bootResponse.status, 200); const bootPayload = (await bootResponse.json()) as any; assert.equal(bootPayload.onboarding.locked, true); @@ -375,19 +446,21 @@ test("failed API-key validation stays locked, redacts the error, and is reflecte } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("direct prompt commands cannot bypass onboarding while required setup is still locked", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); const harness = configureBridgeFixture(fixture, "sess-command-locked"); - onboarding.configureOnboardingServiceForTests({ authStorage }); + onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey }); try { const response = await commandRoute.POST( - new Request("http://localhost/api/session/command", { + projectRequest(fixture.projectCwd, "/api/session/command", { method: "POST", body: JSON.stringify({ type: "prompt", message: "hello from bypass attempt" }), }), @@ -403,7 +476,7 @@ test("direct prompt commands cannot bypass onboarding while required setup is st assert.equal(harness.spawnCalls, 0); const stateResponse = await commandRoute.POST( - new Request("http://localhost/api/session/command", { + projectRequest(fixture.projectCwd, "/api/session/command", { method: "POST", body: JSON.stringify({ type: "get_state" }), }), @@ -416,16 +489,19 @@ test("direct prompt commands cannot bypass onboarding while required setup is st } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("bridge auth 
refresh failures remain inspectable and keep the workspace locked after credentials validate", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); configureBridgeFixture(fixture, "sess-refresh-failure"); onboarding.configureOnboardingServiceForTests({ authStorage, + getEnvApiKey: noEnvApiKey, validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }), refreshBridgeAuth: async () => { throw new Error("bridge restart failed for sk-refresh-secret-123456"); @@ -434,7 +510,7 @@ test("bridge auth refresh failures remain inspectable and keep the workspace loc try { const validationResponse = await onboardingRoute.POST( - new Request("http://localhost/api/onboarding", { + projectRequest(fixture.projectCwd, "/api/onboarding", { method: "POST", body: JSON.stringify({ action: "save_api_key", @@ -455,7 +531,7 @@ test("bridge auth refresh failures remain inspectable and keep the workspace loc assert.doesNotMatch(validationPayload.onboarding.bridgeAuthRefresh.error, /sk-refresh-secret-123456/); assert.equal(authStorage.hasAuth("openai"), true); - const bootResponse = await bootRoute.GET(); + const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); const bootPayload = (await bootResponse.json()) as any; assert.equal(bootPayload.onboarding.locked, true); assert.equal(bootPayload.onboarding.lockReason, "bridge_refresh_failed"); @@ -463,22 +539,25 @@ test("bridge auth refresh failures remain inspectable and keep the workspace loc } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("successful API-key validation persists the credential and unlocks onboarding", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); const harness = configureBridgeFixture(fixture, "sess-validation-success"); onboarding.configureOnboardingServiceForTests({ authStorage, + getEnvApiKey: noEnvApiKey, validateApiKey: async () => ({ ok: true, message: "openai credentials validated" }), }); try { const validationResponse = await onboardingRoute.POST( - new Request("http://localhost/api/onboarding", { + projectRequest(fixture.projectCwd, "/api/onboarding", { method: "POST", body: JSON.stringify({ action: "save_api_key", @@ -502,7 +581,7 @@ test("successful API-key validation persists the credential and unlocks onboardi assert.equal(authStorage.hasAuth("openai"), true); assert.equal(harness.spawnCalls, 1); - const bootResponse = await bootRoute.GET(); + const bootResponse = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); const bootPayload = (await bootResponse.json()) as any; assert.equal(bootPayload.onboarding.locked, false); assert.equal(bootPayload.onboarding.lockReason, null); @@ -511,27 +590,29 @@ test("successful API-key validation persists the credential and unlocks onboardi } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("logout_provider removes saved auth, refreshes the bridge, and relocks onboarding when it was the only provider", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({ openai: { type: "api_key", key: "sk-saved-logout" }, } as any); const harness = configureBridgeFixture(fixture, "sess-logout-success"); - 
onboarding.configureOnboardingServiceForTests({ authStorage }); + onboarding.configureOnboardingServiceForTests({ authStorage, getEnvApiKey: noEnvApiKey }); try { - const bootBefore = await bootRoute.GET(); + const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); const bootBeforePayload = (await bootBefore.json()) as any; assert.equal(bootBeforePayload.onboarding.locked, false); assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "openai"); assert.equal(harness.spawnCalls, 1); const logoutResponse = await onboardingRoute.POST( - new Request("http://localhost/api/onboarding", { + projectRequest(fixture.projectCwd, "/api/onboarding", { method: "POST", body: JSON.stringify({ action: "logout_provider", @@ -549,7 +630,7 @@ test("logout_provider removes saved auth, refreshes the bridge, and relocks onbo assert.equal(authStorage.hasAuth("openai"), false); assert.equal(harness.spawnCalls, 2); - const bootAfter = await bootRoute.GET(); + const bootAfter = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); const bootAfterPayload = (await bootAfter.json()) as any; assert.equal(bootAfterPayload.onboarding.locked, true); assert.equal(bootAfterPayload.onboarding.lockReason, "required_setup"); @@ -558,27 +639,32 @@ test("logout_provider removes saved auth, refreshes the bridge, and relocks onbo } finally { onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); test("logout_provider fails clearly for environment-backed auth that the browser cannot remove", async () => { const fixture = makeWorkspaceFixture(); + clearOnboardingEnv(); const authStorage = AuthStorage.inMemory({}); const previousGithubToken = process.env.GITHUB_TOKEN; process.env.GITHUB_TOKEN = "ghu_env_only_token"; configureBridgeFixture(fixture, "sess-logout-env"); - onboarding.configureOnboardingServiceForTests({ authStorage }); + onboarding.configureOnboardingServiceForTests({ + authStorage, + getEnvApiKey: (provider: string) => (provider === "github-copilot" ? 
process.env.GITHUB_TOKEN : undefined), + }); try { - const bootBefore = await bootRoute.GET(); + const bootBefore = await bootRoute.GET(projectRequest(fixture.projectCwd, "/api/boot")); const bootBeforePayload = (await bootBefore.json()) as any; assert.equal(bootBeforePayload.onboarding.locked, false); assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.providerId, "github-copilot"); assert.equal(bootBeforePayload.onboarding.required.satisfiedBy.source, "environment"); const logoutResponse = await onboardingRoute.POST( - new Request("http://localhost/api/onboarding", { + projectRequest(fixture.projectCwd, "/api/onboarding", { method: "POST", body: JSON.stringify({ action: "logout_provider", @@ -601,6 +687,7 @@ test("logout_provider fails clearly for environment-backed auth that the browser } onboarding.resetOnboardingServiceForTests(); await bridge.resetBridgeServiceForTests(); + restoreOnboardingEnv(); fixture.cleanup(); } }); diff --git a/src/tests/web-subprocess-module-resolution.test.ts b/src/tests/web-subprocess-module-resolution.test.ts new file mode 100644 index 000000000..3c10d8057 --- /dev/null +++ b/src/tests/web-subprocess-module-resolution.test.ts @@ -0,0 +1,157 @@ +import test from "node:test" +import assert from "node:assert/strict" +import { join } from "node:path" + +import { + isUnderNodeModules, + resolveSubprocessModule, +} from "../web/ts-subprocess-flags.ts" + +// --------------------------------------------------------------------------- +// isUnderNodeModules — exported utility +// --------------------------------------------------------------------------- + +test("isUnderNodeModules returns false for paths outside node_modules", () => { + assert.equal(isUnderNodeModules("/home/user/projects/gsd"), false) +}) + +test("isUnderNodeModules returns true for Unix paths under node_modules/", () => { + assert.equal( + isUnderNodeModules("/usr/lib/node_modules/gsd-pi"), + true, + ) +}) + +test("isUnderNodeModules returns true for Windows paths under node_modules/", () => { + assert.equal( + isUnderNodeModules("C:\\Users\\dev\\AppData\\node_modules\\gsd-pi"), + true, + ) +}) + +test("isUnderNodeModules returns false for substring match without trailing slash", () => { + assert.equal( + isUnderNodeModules("/home/user/my_node_modules_backup/gsd"), + false, + ) +}) + +// --------------------------------------------------------------------------- +// resolveSubprocessModule — resolves .ts → dist .js under node_modules +// --------------------------------------------------------------------------- + +test("resolveSubprocessModule returns source .ts path when NOT under node_modules", () => { + const packageRoot = "/home/user/projects/gsd" + const result = resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/workspace-index.ts", + // existsSync not needed — should return src path without checking dist + ) + + assert.deepEqual(result, { + modulePath: join(packageRoot, "src", "resources/extensions/gsd/workspace-index.ts"), + useCompiledJs: false, + }) +}) + +test("resolveSubprocessModule returns compiled .js path when under node_modules and dist file exists", () => { + const packageRoot = "/usr/lib/node_modules/gsd-pi" + const distPath = join(packageRoot, "dist", "resources/extensions/gsd/workspace-index.js") + const result = resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/workspace-index.ts", + (p: string) => p === distPath, + ) + + assert.deepEqual(result, { + modulePath: distPath, + useCompiledJs: true, + }) +}) + 
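+// A minimal sketch of the contract these tests assume. The real implementation
+// lives in src/web/ts-subprocess-flags.ts and may differ in details; only the
+// observable behaviour asserted in this file is relied upon:
+//
+//   function resolveSubprocessModule(
+//     packageRoot: string,
+//     relSourcePath: string,                 // relative to src/, ends in .ts
+//     exists?: (p: string) => boolean,       // only consulted under node_modules
+//   ): { modulePath: string; useCompiledJs: boolean } {
+//     if (!isUnderNodeModules(packageRoot)) {
+//       return { modulePath: join(packageRoot, "src", relSourcePath), useCompiledJs: false }
+//     }
+//     const distPath = join(packageRoot, "dist", relSourcePath.replace(/\.ts$/, ".js"))
+//     if (exists?.(distPath)) {
+//       return { modulePath: distPath, useCompiledJs: true }
+//     }
+//     return { modulePath: join(packageRoot, "src", relSourcePath), useCompiledJs: false }
+//   }
+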
+test("resolveSubprocessModule falls back to source .ts when under node_modules but dist file missing", () => { + const packageRoot = "/usr/lib/node_modules/gsd-pi" + const result = resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/workspace-index.ts", + () => false, // dist file does not exist + ) + + assert.deepEqual(result, { + modulePath: join(packageRoot, "src", "resources/extensions/gsd/workspace-index.ts"), + useCompiledJs: false, + }) +}) + +test("resolveSubprocessModule handles Windows paths under node_modules", () => { + const packageRoot = "C:\\Users\\dev\\AppData\\node_modules\\gsd-pi" + const distPath = join(packageRoot, "dist", "resources/extensions/gsd/auto.js") + const result = resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/auto.ts", + (p: string) => p === distPath, + ) + + assert.deepEqual(result, { + modulePath: distPath, + useCompiledJs: true, + }) +}) + +test("resolveSubprocessModule strips .ts extension when building dist .js path", () => { + const packageRoot = "/usr/lib/node_modules/gsd-pi" + let checkedPath = "" + resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/doctor.ts", + (p: string) => { checkedPath = p; return true }, + ) + + assert.equal( + checkedPath, + join(packageRoot, "dist", "resources/extensions/gsd/doctor.js"), + "should check for .js file in dist/, not .ts", + ) +}) + +// --------------------------------------------------------------------------- +// Integration: bridge-service subprocess resolution pattern +// --------------------------------------------------------------------------- + +test("bridge-service workspace-index subprocess uses compiled JS when under node_modules (source audit)", async () => { + // Verify bridge-service.ts calls resolveSubprocessModule for workspace-index + const { readFileSync } = await import("node:fs") + const bridgeSource = readFileSync( + join(process.cwd(), "src", "web", "bridge-service.ts"), + "utf-8", + ) + + assert.match( + bridgeSource, + /resolveSubprocessModule/, + "bridge-service.ts must use resolveSubprocessModule to resolve workspace-index path — " + + "hardcoded .ts paths fail with ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING on Node v24 (see #2279)", + ) +}) + +test("all web service files use resolveSubprocessModule instead of hardcoded .ts paths (source audit)", async () => { + const { readFileSync, readdirSync } = await import("node:fs") + + const serviceFiles = readdirSync(join(process.cwd(), "src", "web")) + .filter((f: string) => f.endsWith("-service.ts")) + + for (const file of serviceFiles) { + const source = readFileSync(join(process.cwd(), "src", "web", file), "utf-8") + + // If the service file imports resolveTypeStrippingFlag it spawns subprocesses + // and must also use resolveSubprocessModule + if (source.includes("resolveTypeStrippingFlag")) { + assert.match( + source, + /resolveSubprocessModule/, + `${file} uses resolveTypeStrippingFlag but does not use resolveSubprocessModule — ` + + "subprocess .ts paths will fail under node_modules/ on Node v24 (#2279)", + ) + } + } +}) diff --git a/src/tests/web-switch-project.test.ts b/src/tests/web-switch-project.test.ts new file mode 100644 index 000000000..eae701fd0 --- /dev/null +++ b/src/tests/web-switch-project.test.ts @@ -0,0 +1,277 @@ +import test, { after, describe } from "node:test"; +import assert from "node:assert/strict"; +import { + mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync, + existsSync, statSync, +} from "node:fs"; +import { tmpdir, homedir } from "node:os"; 
+import { join, resolve } from "node:path"; + +// --------------------------------------------------------------------------- +// Test the core validation + persistence logic used by /api/switch-root +// without pulling in the heavy bridge-service import chain. +// +// The server-side handler does: +// 1. Validate path exists and is a directory +// 2. Resolve tilde + resolve() to absolute path +// 3. Persist devRoot to web-preferences.json (clearing lastActiveProject) +// 4. Discover projects under the new root +// +// We test each concern in isolation using the same logic. +// --------------------------------------------------------------------------- + +// ── Helpers (mirrors /api/switch-root handler logic) ────────────────────── + +function expandTilde(p: string): string { + if (p === "~") return homedir(); + if (p.startsWith("~/")) return homedir() + p.slice(1); + return p; +} + +interface SwitchRootResult { + ok: boolean; + error?: string; + devRoot?: string; +} + +function validateSwitchRoot(rawDevRoot: string): SwitchRootResult { + const trimmed = rawDevRoot.trim(); + if (!trimmed) { + return { ok: false, error: "Missing devRoot in request body" }; + } + + const expanded = expandTilde(trimmed); + const resolved = resolve(expanded); + + if (!existsSync(resolved)) { + return { ok: false, error: `Path does not exist: ${resolved}` }; + } + + try { + const stat = statSync(resolved); + if (!stat.isDirectory()) { + return { ok: false, error: `Not a directory: ${resolved}` }; + } + } catch { + return { ok: false, error: `Cannot access path: ${resolved}` }; + } + + return { ok: true, devRoot: resolved }; +} + +interface WebPreferences { + devRoot?: string; + lastActiveProject?: string; +} + +function persistSwitchRoot( + prefsPath: string, + newDevRoot: string, +): WebPreferences { + let existing: WebPreferences = {}; + try { + if (existsSync(prefsPath)) { + existing = JSON.parse(readFileSync(prefsPath, "utf-8")); + } + } catch { + // Corrupt file — start fresh + } + + const prefs: WebPreferences = { + ...existing, + devRoot: newDevRoot, + lastActiveProject: undefined, + }; + + writeFileSync(prefsPath, JSON.stringify(prefs, null, 2), "utf-8"); + return prefs; +} + +// --------------------------------------------------------------------------- +// Fixtures +// --------------------------------------------------------------------------- + +const tempRoot = mkdtempSync(join(tmpdir(), "gsd-switch-root-")); + +const rootA = join(tempRoot, "root-a"); +mkdirSync(rootA); +mkdirSync(join(rootA, "project-x")); +mkdirSync(join(rootA, "project-x", ".git")); +writeFileSync(join(rootA, "project-x", "package.json"), "{}"); +mkdirSync(join(rootA, "project-y")); + +const rootB = join(tempRoot, "root-b"); +mkdirSync(rootB); +mkdirSync(join(rootB, "project-z")); +writeFileSync(join(rootB, "project-z", "Cargo.toml"), ""); + +const filePath = join(tempRoot, "not-a-dir.txt"); +writeFileSync(filePath, "hello"); + +const prefsDir = join(tempRoot, "prefs"); +mkdirSync(prefsDir); +const prefsPath = join(prefsDir, "web-preferences.json"); + +after(() => { + rmSync(tempRoot, { recursive: true, force: true }); +}); + +// --------------------------------------------------------------------------- +// Tests — Path validation +// --------------------------------------------------------------------------- + +describe("switch-root: path validation", () => { + test("valid directory returns ok with resolved path", () => { + const result = validateSwitchRoot(rootA); + assert.ok(result.ok); + assert.equal(result.devRoot, rootA); 
+ }); + + test("empty string returns error", () => { + const result = validateSwitchRoot(""); + assert.ok(!result.ok); + assert.match(result.error!, /Missing devRoot/); + }); + + test("whitespace-only string returns error", () => { + const result = validateSwitchRoot(" "); + assert.ok(!result.ok); + assert.match(result.error!, /Missing devRoot/); + }); + + test("non-existent path returns error", () => { + const result = validateSwitchRoot(join(tempRoot, "nonexistent-dir")); + assert.ok(!result.ok); + assert.match(result.error!, /does not exist/); + }); + + test("file path (not a directory) returns error", () => { + const result = validateSwitchRoot(filePath); + assert.ok(!result.ok); + assert.match(result.error!, /Not a directory/); + }); + + test("tilde path expands to home directory", () => { + const result = validateSwitchRoot("~"); + // ~ always exists as a directory (user's home) + assert.ok(result.ok, `Expected ok for ~, got error: ${result.error}`); + assert.equal(result.devRoot, homedir()); + }); + + test("resolves relative paths to absolute", () => { + // Create a relative path that's valid from cwd + const result = validateSwitchRoot(rootA); + assert.ok(result.ok); + assert.ok(result.devRoot!.startsWith("/"), "Should be absolute path"); + }); +}); + +// --------------------------------------------------------------------------- +// Tests — Preference persistence +// --------------------------------------------------------------------------- + +describe("switch-root: preference persistence", () => { + test("writes devRoot and clears lastActiveProject", () => { + writeFileSync(prefsPath, JSON.stringify({ + devRoot: rootA, + lastActiveProject: "/old/project", + }, null, 2)); + + const result = persistSwitchRoot(prefsPath, rootB); + + assert.equal(result.devRoot, rootB); + assert.equal(result.lastActiveProject, undefined); + + // Verify on-disk + const onDisk = JSON.parse(readFileSync(prefsPath, "utf-8")); + assert.equal(onDisk.devRoot, rootB); + // undefined is not serialized to JSON + assert.ok( + !("lastActiveProject" in onDisk) || onDisk.lastActiveProject == null, + "lastActiveProject should be cleared", + ); + }); + + test("creates prefs file from scratch", () => { + const freshPath = join(prefsDir, "fresh.json"); + assert.ok(!existsSync(freshPath)); + + persistSwitchRoot(freshPath, rootA); + + assert.ok(existsSync(freshPath)); + const onDisk = JSON.parse(readFileSync(freshPath, "utf-8")); + assert.equal(onDisk.devRoot, rootA); + }); + + test("handles corrupt prefs file gracefully", () => { + writeFileSync(prefsPath, "NOT VALID JSON!!!"); + + const result = persistSwitchRoot(prefsPath, rootB); + assert.equal(result.devRoot, rootB); + + const onDisk = JSON.parse(readFileSync(prefsPath, "utf-8")); + assert.equal(onDisk.devRoot, rootB); + }); + + test("overwrites existing devRoot", () => { + writeFileSync(prefsPath, JSON.stringify({ devRoot: rootA }, null, 2)); + + persistSwitchRoot(prefsPath, rootB); + + const onDisk = JSON.parse(readFileSync(prefsPath, "utf-8")); + assert.equal(onDisk.devRoot, rootB); + assert.notEqual(onDisk.devRoot, rootA); + }); +}); + +// --------------------------------------------------------------------------- +// Tests — Tilde expansion +// --------------------------------------------------------------------------- + +describe("switch-root: tilde expansion", () => { + test("~ expands to home directory", () => { + assert.equal(expandTilde("~"), homedir()); + }); + + test("~/Projects expands correctly", () => { + assert.equal(expandTilde("~/Projects"), 
`${homedir()}/Projects`); + }); + + test("absolute path is unchanged", () => { + assert.equal(expandTilde("/usr/local/bin"), "/usr/local/bin"); + }); + + test("relative path is unchanged", () => { + assert.equal(expandTilde("relative/path"), "relative/path"); + }); + + test("~user is not expanded (only bare ~ or ~/)", () => { + assert.equal(expandTilde("~other"), "~other"); + }); +}); + +// --------------------------------------------------------------------------- +// Tests — End-to-end switch scenario +// --------------------------------------------------------------------------- + +describe("switch-root: end-to-end scenario", () => { + test("full switch: validate + persist + verify projects change", () => { + // Start with root-a + writeFileSync(prefsPath, JSON.stringify({ + devRoot: rootA, + lastActiveProject: join(rootA, "project-x"), + }, null, 2)); + + // User requests switch to root-b + const validation = validateSwitchRoot(rootB); + assert.ok(validation.ok, `Validation should pass: ${validation.error}`); + + const prefs = persistSwitchRoot(prefsPath, validation.devRoot!); + assert.equal(prefs.devRoot, rootB); + assert.equal(prefs.lastActiveProject, undefined); + + // Verify on-disk state + const finalPrefs = JSON.parse(readFileSync(prefsPath, "utf-8")); + assert.equal(finalPrefs.devRoot, rootB); + }); +}); diff --git a/src/web-mode.ts b/src/web-mode.ts index 08696bcf1..42683a667 100644 --- a/src/web-mode.ts +++ b/src/web-mode.ts @@ -687,7 +687,12 @@ export async function launchWebMode( // Register in multi-instance registry registerInstance(options.cwd, { pid, port, url }, deps.registryPath) } - ;(deps.openBrowser ?? openBrowser)(`${url}/#token=${authToken}`) + const authenticatedUrl = `${url}/#token=${authToken}` + try { + ;(deps.openBrowser ?? openBrowser)(authenticatedUrl) + } catch (browserError) { + stderr.write(`[gsd] Could not open browser: ${browserError instanceof Error ? 
browserError.message : String(browserError)}\n`) + } } catch (error) { const failure: WebModeLaunchFailure = { mode: 'web', @@ -706,6 +711,7 @@ export async function launchWebMode( return failure } + const authenticatedUrl = `${url}/#token=${authToken}` const success: WebModeLaunchSuccess = { mode: 'web', ok: true, @@ -718,7 +724,7 @@ export async function launchWebMode( hostPath: resolution.entryPath, hostRoot: resolution.hostRoot, } - stderr.write(`[gsd] Ready → ${url}\n`) + stderr.write(`[gsd] Ready → ${authenticatedUrl}\n`) emitLaunchStatus(stderr, success) return success } diff --git a/src/web/auto-dashboard-service.ts b/src/web/auto-dashboard-service.ts index fdce2c0c9..58c62a4ad 100644 --- a/src/web/auto-dashboard-service.ts +++ b/src/web/auto-dashboard-service.ts @@ -4,7 +4,7 @@ import { join } from "node:path"; import { pathToFileURL } from "node:url"; import type { AutoDashboardData } from "./bridge-service.ts"; -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" const AUTO_DASHBOARD_MAX_BUFFER = 1024 * 1024; const TEST_AUTO_DASHBOARD_MODULE_ENV = "GSD_WEB_TEST_AUTO_DASHBOARD_MODULE"; @@ -32,10 +32,6 @@ function fallbackAutoDashboardData(): AutoDashboardData { }; } -function resolveAutoDashboardModulePath(packageRoot: string, env: NodeJS.ProcessEnv): string { - return env[TEST_AUTO_DASHBOARD_MODULE_ENV] || join(packageRoot, "src", "resources", "extensions", "gsd", "auto.ts"); -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs"); } @@ -55,11 +51,20 @@ export async function collectAuthoritativeAutoDashboardData( const checkExists = options.existsSync ?? existsSync; const resolveTsLoader = resolveTsLoaderPath(packageRoot); - const autoModulePath = resolveAutoDashboardModulePath(packageRoot, env); - if (!checkExists(resolveTsLoader) || !checkExists(autoModulePath)) { + // Use test override if provided; otherwise resolve via resolveSubprocessModule + const testModulePath = env[TEST_AUTO_DASHBOARD_MODULE_ENV]; + const moduleResolution = testModulePath + ? { modulePath: testModulePath, useCompiledJs: false } + : resolveSubprocessModule(packageRoot, "resources/extensions/gsd/auto.ts", checkExists); + const autoModulePath = moduleResolution.modulePath; + + if (!moduleResolution.useCompiledJs && (!checkExists(resolveTsLoader) || !checkExists(autoModulePath))) { throw new Error(`authoritative auto dashboard provider not found; checked=${resolveTsLoader},${autoModulePath}`); } + if (moduleResolution.useCompiledJs && !checkExists(autoModulePath)) { + throw new Error(`authoritative auto dashboard provider not found; checked=${autoModulePath}`); + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -68,14 +73,17 @@ export async function collectAuthoritativeAutoDashboardData( 'process.stdout.write(JSON.stringify(result));', ].join(" "); + const prefixArgs = buildSubprocessPrefixArgs( + packageRoot, + moduleResolution, + pathToFileURL(resolveTsLoader).href, + ); + return await new Promise((resolveResult, reject) => { execFile( options.execPath ?? 
process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/bridge-service.ts b/src/web/bridge-service.ts index 32ed1048b..ebac2e8b1 100644 --- a/src/web/bridge-service.ts +++ b/src/web/bridge-service.ts @@ -4,7 +4,7 @@ import { StringDecoder } from "node:string_decoder"; import type { Readable } from "node:stream"; import { join, resolve, dirname } from "node:path"; import { fileURLToPath, pathToFileURL } from "node:url"; -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts"; +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts"; import type { AgentSessionEvent, SessionStateChangeReason } from "../../packages/pi-coding-agent/src/core/agent-session.ts"; import type { @@ -905,12 +905,20 @@ async function loadCachedWorkspaceIndex( async function loadWorkspaceIndexViaChildProcess(basePath: string, packageRoot: string): Promise { const deps = getBridgeDeps(); - const resolveTsLoader = join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs"); - const workspaceModulePath = join(packageRoot, "src", "resources", "extensions", "gsd", "workspace-index.ts"); const checkExists = deps.existsSync ?? existsSync; - if (!checkExists(resolveTsLoader) || !checkExists(workspaceModulePath)) { + const resolveTsLoader = join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs"); + const moduleResolution = resolveSubprocessModule( + packageRoot, + "resources/extensions/gsd/workspace-index.ts", + checkExists, + ); + const workspaceModulePath = moduleResolution.modulePath; + if (!moduleResolution.useCompiledJs && (!checkExists(resolveTsLoader) || !checkExists(workspaceModulePath))) { throw new Error(`workspace index loader not found; checked=${resolveTsLoader},${workspaceModulePath}`); } + if (moduleResolution.useCompiledJs && !checkExists(workspaceModulePath)) { + throw new Error(`workspace index module not found; checked=${workspaceModulePath}`); + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -919,14 +927,17 @@ async function loadWorkspaceIndexViaChildProcess(basePath: string, packageRoot: 'process.stdout.write(JSON.stringify(result));', ].join(' '); + const prefixArgs = buildSubprocessPrefixArgs( + packageRoot, + moduleResolution, + pathToFileURL(resolveTsLoader).href, + ); + return await new Promise((resolveResult, reject) => { execFile( deps.execPath ?? 
process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/captures-service.ts b/src/web/captures-service.ts index 938cdf396..1f7cb1189 100644 --- a/src/web/captures-service.ts +++ b/src/web/captures-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { CapturesData, CaptureResolveRequest, CaptureResolveResult } from "../../web/lib/knowledge-captures-types.ts" const CAPTURES_MAX_BUFFER = 2 * 1024 * 1024 const CAPTURES_MODULE_ENV = "GSD_CAPTURES_MODULE" -function resolveCapturesModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "captures.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -28,13 +24,17 @@ export async function collectCapturesData(projectCwdOverride?: string): Promise< const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const capturesModulePath = resolveCapturesModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/captures.ts") + const capturesModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(capturesModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(capturesModulePath))) { throw new Error( `captures data provider not found; checked=${resolveTsLoader},${capturesModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(capturesModulePath)) { + throw new Error(`captures data provider not found; checked=${capturesModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -46,14 +46,13 @@ export async function collectCapturesData(projectCwdOverride?: string): Promise< 'process.stdout.write(JSON.stringify(result));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], @@ -95,13 +94,17 @@ export async function resolveCaptureAction(request: CaptureResolveRequest, proje const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const capturesModulePath = resolveCapturesModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/captures.ts") + const capturesModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(capturesModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(capturesModulePath))) { throw new Error( `captures data provider not found; checked=${resolveTsLoader},${capturesModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(capturesModulePath)) { + throw new Error(`captures data provider not found; 
checked=${capturesModulePath}`) + } const safeId = JSON.stringify(request.captureId) const safeClassification = JSON.stringify(request.classification) @@ -115,14 +118,13 @@ export async function resolveCaptureAction(request: CaptureResolveRequest, proje `process.stdout.write(JSON.stringify({ ok: true, captureId: ${safeId} }));`, ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/cleanup-service.ts b/src/web/cleanup-service.ts index a83ba40f3..145201f31 100644 --- a/src/web/cleanup-service.ts +++ b/src/web/cleanup-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { CleanupData, CleanupResult } from "../../web/lib/remaining-command-types.ts" const CLEANUP_MAX_BUFFER = 2 * 1024 * 1024 const CLEANUP_MODULE_ENV = "GSD_CLEANUP_MODULE" -function resolveCleanupModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "native-git-bridge.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -28,13 +24,17 @@ export async function collectCleanupData(projectCwdOverride?: string): Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], @@ -114,13 +113,17 @@ export async function executeCleanup( const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const cleanupModulePath = resolveCleanupModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/native-git-bridge.ts") + const cleanupModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(cleanupModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(cleanupModulePath))) { throw new Error( `cleanup service modules not found; checked=${resolveTsLoader},${cleanupModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(cleanupModulePath)) { + throw new Error(`cleanup service modules not found; checked=${cleanupModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -147,14 +150,13 @@ export async function executeCleanup( 'process.stdout.write(JSON.stringify({ deletedBranches, prunedSnapshots, message }));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/doctor-service.ts b/src/web/doctor-service.ts index 
755f155b3..8fac5b272 100644 --- a/src/web/doctor-service.ts +++ b/src/web/doctor-service.ts @@ -4,47 +4,31 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { DoctorReport, DoctorFixResult } from "../../web/lib/diagnostics-types.ts" const DOCTOR_MAX_BUFFER = 2 * 1024 * 1024 const DOCTOR_MODULE_ENV = "GSD_DOCTOR_MODULE" -function resolveDoctorModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "doctor.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } -function validateModulePaths( - resolveTsLoader: string, - doctorModulePath: string, -): void { - if (!existsSync(resolveTsLoader) || !existsSync(doctorModulePath)) { - throw new Error( - `doctor data provider not found; checked=${resolveTsLoader},${doctorModulePath}`, - ) - } -} - function runDoctorChild( packageRoot: string, projectCwd: string, script: string, resolveTsLoader: string, doctorModulePath: string, + moduleResolution: { modulePath: string; useCompiledJs: boolean }, scope?: string, ): Promise { + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) return new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], @@ -78,8 +62,17 @@ export async function collectDoctorData(scope?: string, projectCwdOverride?: str const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const doctorModulePath = resolveDoctorModulePath(packageRoot) - validateModulePaths(resolveTsLoader, doctorModulePath) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/doctor.ts") + const doctorModulePath = moduleResolution.modulePath + + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(doctorModulePath))) { + throw new Error( + `doctor data provider not found; checked=${resolveTsLoader},${doctorModulePath}`, + ) + } + if (moduleResolution.useCompiledJs && !existsSync(doctorModulePath)) { + throw new Error(`doctor data provider not found; checked=${doctorModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -98,7 +91,7 @@ export async function collectDoctorData(scope?: string, projectCwdOverride?: str ].join(" ") const stdout = await runDoctorChild( - packageRoot, projectCwd, script, resolveTsLoader, doctorModulePath, scope, + packageRoot, projectCwd, script, resolveTsLoader, doctorModulePath, moduleResolution, scope, ) try { @@ -119,8 +112,17 @@ export async function applyDoctorFixes(scope?: string, projectCwdOverride?: stri const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const doctorModulePath = resolveDoctorModulePath(packageRoot) - validateModulePaths(resolveTsLoader, doctorModulePath) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/doctor.ts") + const doctorModulePath = moduleResolution.modulePath + + if 
(!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(doctorModulePath))) { + throw new Error( + `doctor data provider not found; checked=${resolveTsLoader},${doctorModulePath}`, + ) + } + if (moduleResolution.useCompiledJs && !existsSync(doctorModulePath)) { + throw new Error(`doctor data provider not found; checked=${doctorModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -136,7 +138,7 @@ export async function applyDoctorFixes(scope?: string, projectCwdOverride?: stri ].join(" ") const stdout = await runDoctorChild( - packageRoot, projectCwd, script, resolveTsLoader, doctorModulePath, scope, + packageRoot, projectCwd, script, resolveTsLoader, doctorModulePath, moduleResolution, scope, ) try { diff --git a/src/web/export-service.ts b/src/web/export-service.ts index 46794d972..431f31473 100644 --- a/src/web/export-service.ts +++ b/src/web/export-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { ExportResult } from "../../web/lib/remaining-command-types.ts" const EXPORT_MAX_BUFFER = 4 * 1024 * 1024 const EXPORT_MODULE_ENV = "GSD_EXPORT_MODULE" -function resolveExportModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "export.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -31,13 +27,17 @@ export async function collectExportData( const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const exportModulePath = resolveExportModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/export.ts") + const exportModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(exportModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(exportModulePath))) { throw new Error( `export data provider not found; checked=${resolveTsLoader},${exportModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(exportModulePath)) { + throw new Error(`export data provider not found; checked=${exportModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -55,14 +55,13 @@ export async function collectExportData( '}', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/forensics-service.ts b/src/web/forensics-service.ts index 80867429e..e40703055 100644 --- a/src/web/forensics-service.ts +++ b/src/web/forensics-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, 
buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { ForensicReport } from "../../web/lib/diagnostics-types.ts" const FORENSICS_MAX_BUFFER = 2 * 1024 * 1024 const FORENSICS_MODULE_ENV = "GSD_FORENSICS_MODULE" -function resolveForensicsModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "forensics.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -30,13 +26,17 @@ export async function collectForensicsData(projectCwdOverride?: string): Promise const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const forensicsModulePath = resolveForensicsModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/forensics.ts") + const forensicsModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(forensicsModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(forensicsModulePath))) { throw new Error( `forensics data provider not found; checked=${resolveTsLoader},${forensicsModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(forensicsModulePath)) { + throw new Error(`forensics data provider not found; checked=${forensicsModulePath}`) + } // The child script loads the upstream module, calls buildForensicReport(), // simplifies the output for browser consumption, and writes JSON to stdout. @@ -74,14 +74,13 @@ export async function collectForensicsData(projectCwdOverride?: string): Promise 'process.stdout.write(JSON.stringify(result));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/history-service.ts b/src/web/history-service.ts index c2d2a8685..a2ee75c68 100644 --- a/src/web/history-service.ts +++ b/src/web/history-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { HistoryData } from "../../web/lib/remaining-command-types.ts" const HISTORY_MAX_BUFFER = 2 * 1024 * 1024 const HISTORY_MODULE_ENV = "GSD_HISTORY_MODULE" -function resolveHistoryModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "metrics.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -28,13 +24,17 @@ export async function collectHistoryData(projectCwdOverride?: string): Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/hooks-service.ts b/src/web/hooks-service.ts index bdaaea267..b8142dda4 100644 --- a/src/web/hooks-service.ts +++ 
b/src/web/hooks-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { HooksData } from "../../web/lib/remaining-command-types.ts" const HOOKS_MAX_BUFFER = 512 * 1024 const HOOKS_MODULE_ENV = "GSD_HOOKS_MODULE" -function resolveHooksModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "post-unit-hooks.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -29,13 +25,17 @@ export async function collectHooksData(projectCwdOverride?: string): Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/onboarding-service.ts b/src/web/onboarding-service.ts index 9c5c6af34..26f4d6883 100644 --- a/src/web/onboarding-service.ts +++ b/src/web/onboarding-service.ts @@ -247,7 +247,7 @@ function resolveCredentialSource( if (getEnvApiKeyFn(providerId)) { return "environment"; } - if (authStorage.hasAuth(providerId)) { + if (authStorage.getCredentialsForProvider(providerId).length > 0) { return "runtime"; } return null; diff --git a/src/web/recovery-diagnostics-service.ts b/src/web/recovery-diagnostics-service.ts index 2217ea9af..ee5abeb92 100644 --- a/src/web/recovery-diagnostics-service.ts +++ b/src/web/recovery-diagnostics-service.ts @@ -8,7 +8,7 @@ import { collectSelectiveLiveStatePayload, resolveBridgeRuntimeConfig, } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { WorkspaceRecoveryBrowserAction, WorkspaceRecoveryCodeSummary, @@ -360,14 +360,6 @@ function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } -function resolveDoctorModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "doctor.ts") -} - -function resolveSessionForensicsModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "session-forensics.ts") -} - async function collectRecoveryDiagnosticsChildPayload( packageRoot: string, basePath: string, @@ -379,14 +371,21 @@ async function collectRecoveryDiagnosticsChildPayload( const env = options.env ?? process.env const checkExists = options.existsSync ?? 
existsSync const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const doctorModulePath = resolveDoctorModulePath(packageRoot) - const sessionForensicsModulePath = resolveSessionForensicsModulePath(packageRoot) + const doctorResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/doctor.ts", checkExists) + const forensicsResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/session-forensics.ts", checkExists) + const doctorModulePath = doctorResolution.modulePath + const sessionForensicsModulePath = forensicsResolution.modulePath - if (!checkExists(resolveTsLoader) || !checkExists(doctorModulePath) || !checkExists(sessionForensicsModulePath)) { + if (!doctorResolution.useCompiledJs && (!checkExists(resolveTsLoader) || !checkExists(doctorModulePath) || !checkExists(sessionForensicsModulePath))) { throw new Error( `recovery diagnostics providers not found; checked=${resolveTsLoader},${doctorModulePath},${sessionForensicsModulePath}`, ) } + if (doctorResolution.useCompiledJs && (!checkExists(doctorModulePath) || !checkExists(sessionForensicsModulePath))) { + throw new Error( + `recovery diagnostics providers not found; checked=${doctorModulePath},${sessionForensicsModulePath}`, + ) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -468,14 +467,13 @@ async function collectRecoveryDiagnosticsChildPayload( '}));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, doctorResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( options.execPath ?? process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/settings-service.ts b/src/web/settings-service.ts index fec839679..bbca6132d 100644 --- a/src/web/settings-service.ts +++ b/src/web/settings-service.ts @@ -4,15 +4,11 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { SettingsData } from "../../web/lib/settings-types.ts" const SETTINGS_MAX_BUFFER = 2 * 1024 * 1024 -function resolveModulePath(packageRoot: string, moduleName: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", moduleName) -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -31,16 +27,34 @@ export async function collectSettingsData(projectCwdOverride?: string): Promise< const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const prefsPath = resolveModulePath(packageRoot, "preferences.ts") - const routerPath = resolveModulePath(packageRoot, "model-router.ts") - const budgetPath = resolveModulePath(packageRoot, "context-budget.ts") - const historyPath = resolveModulePath(packageRoot, "routing-history.ts") - const metricsPath = resolveModulePath(packageRoot, "metrics.ts") + const prefsResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/preferences.ts") + const routerResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/model-router.ts") + const budgetResolution = 
resolveSubprocessModule(packageRoot, "resources/extensions/gsd/context-budget.ts") + const historyResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/routing-history.ts") + const metricsResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/metrics.ts") - const requiredPaths = [resolveTsLoader, prefsPath, routerPath, budgetPath, historyPath, metricsPath] - for (const p of requiredPaths) { - if (!existsSync(p)) { - throw new Error(`settings data provider not found; missing=${p}`) + const prefsPath = prefsResolution.modulePath + const routerPath = routerResolution.modulePath + const budgetPath = budgetResolution.modulePath + const historyPath = historyResolution.modulePath + const metricsPath = metricsResolution.modulePath + + // All modules share the same compiled-vs-source mode (they're all from the same package) + const useCompiledJs = prefsResolution.useCompiledJs + + if (!useCompiledJs) { + const requiredPaths = [resolveTsLoader, prefsPath, routerPath, budgetPath, historyPath, metricsPath] + for (const p of requiredPaths) { + if (!existsSync(p)) { + throw new Error(`settings data provider not found; missing=${p}`) + } + } + } else { + const requiredPaths = [prefsPath, routerPath, budgetPath, historyPath, metricsPath] + for (const p of requiredPaths) { + if (!existsSync(p)) { + throw new Error(`settings data provider not found; missing=${p}`) + } } } @@ -105,14 +119,13 @@ export async function collectSettingsData(projectCwdOverride?: string): Promise< 'process.stdout.write(JSON.stringify({ preferences, routingConfig, budgetAllocation, routingHistory, projectTotals }));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, prefsResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/skill-health-service.ts b/src/web/skill-health-service.ts index 43e40ddd7..60834dc96 100644 --- a/src/web/skill-health-service.ts +++ b/src/web/skill-health-service.ts @@ -4,16 +4,12 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { SkillHealthReport } from "../../web/lib/diagnostics-types.ts" const SKILL_HEALTH_MAX_BUFFER = 2 * 1024 * 1024 const SKILL_HEALTH_MODULE_ENV = "GSD_SKILL_HEALTH_MODULE" -function resolveSkillHealthModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "skill-health.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -27,13 +23,17 @@ export async function collectSkillHealthData(projectCwdOverride?: string): Promi const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const skillHealthModulePath = resolveSkillHealthModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/skill-health.ts") + const skillHealthModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || 
!existsSync(skillHealthModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(skillHealthModulePath))) { throw new Error( `skill-health data provider not found; checked=${resolveTsLoader},${skillHealthModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(skillHealthModulePath)) { + throw new Error(`skill-health data provider not found; checked=${skillHealthModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -43,14 +43,13 @@ export async function collectSkillHealthData(projectCwdOverride?: string): Promi 'process.stdout.write(JSON.stringify(report));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/ts-subprocess-flags.ts b/src/web/ts-subprocess-flags.ts index 2365274e8..cb9d4977f 100644 --- a/src/web/ts-subprocess-flags.ts +++ b/src/web/ts-subprocess-flags.ts @@ -1,3 +1,6 @@ +import { existsSync as defaultExistsSync } from "node:fs" +import { join } from "node:path" + /** * Returns the correct Node.js type-stripping flag for subprocess spawning. * @@ -23,11 +26,80 @@ export function resolveTypeStrippingFlag(packageRoot: string): string { * Returns true when the given path sits inside a `node_modules/` directory. * Handles both Unix and Windows path separators. */ -function isUnderNodeModules(filePath: string): boolean { +export function isUnderNodeModules(filePath: string): boolean { const normalized = filePath.replace(/\\/g, "/") return normalized.includes("/node_modules/") } +export interface SubprocessModuleResolution { + /** Absolute path to the module file (either src/.ts or dist/.js). */ + modulePath: string + /** When true the module is pre-compiled JS — skip TS flags and loader. */ + useCompiledJs: boolean +} + +/** + * Resolves a subprocess module path, preferring compiled `dist/*.js` when the + * package root is under `node_modules/`. + * + * Node v24 unconditionally refuses `.ts` files under `node_modules/` — even + * with `--experimental-transform-types`. When GSD is installed globally via + * npm, every subprocess that loads a `.ts` extension module crashes with + * `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING`. + * + * The compiled JS files already ship in the npm package (`dist/` is in the + * `files` array in package.json) and are the correct artefacts to use when + * running from a packaged install. + * + * @param packageRoot Absolute path to the GSD package root. + * @param relPath Path relative to `src/`, e.g. + * `"resources/extensions/gsd/workspace-index.ts"`. + * @param checkExists Optional `existsSync` override (for testing). + */ +export function resolveSubprocessModule( + packageRoot: string, + relPath: string, + checkExists: (path: string) => boolean = defaultExistsSync, +): SubprocessModuleResolution { + if (isUnderNodeModules(packageRoot)) { + const jsRelPath = relPath.replace(/\.ts$/, ".js") + const distPath = join(packageRoot, "dist", jsRelPath) + if (checkExists(distPath)) { + return { modulePath: distPath, useCompiledJs: true } + } + } + + return { + modulePath: join(packageRoot, "src", relPath), + useCompiledJs: false, + } +} + +/** + * Builds the Node.js subprocess prefix args for running a GSD extension module. 
+ * + * When the module resolved to compiled JS (`useCompiledJs === true`), returns + * only `["--input-type=module"]` — no TS loader, no TS stripping flag. + * + * When the module is TypeScript source, returns the full prefix: + * `["--import", , , "--input-type=module"]`. + */ +export function buildSubprocessPrefixArgs( + packageRoot: string, + resolution: SubprocessModuleResolution, + tsLoaderHref: string, +): string[] { + if (resolution.useCompiledJs) { + return ["--input-type=module"] + } + return [ + "--import", + tsLoaderHref, + resolveTypeStrippingFlag(packageRoot), + "--input-type=module", + ] +} + /** * Returns true when the running Node version supports * `--experimental-transform-types` (available since Node v22.7.0). diff --git a/src/web/undo-service.ts b/src/web/undo-service.ts index ede0049c3..ad339a359 100644 --- a/src/web/undo-service.ts +++ b/src/web/undo-service.ts @@ -4,21 +4,13 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" import type { UndoInfo, UndoResult } from "../../web/lib/remaining-command-types.ts" const UNDO_MAX_BUFFER = 2 * 1024 * 1024 const UNDO_MODULE_ENV = "GSD_UNDO_MODULE" const PATHS_MODULE_ENV = "GSD_PATHS_MODULE" -function resolveUndoModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "undo.ts") -} - -function resolvePathsModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "paths.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -119,20 +111,30 @@ export async function collectUndoInfo(projectCwdOverride?: string): Promise { const config = resolveBridgeRuntimeConfig(undefined, projectCwdOverride) const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const undoModulePath = resolveUndoModulePath(packageRoot) - const pathsModulePath = resolvePathsModulePath(packageRoot) + const undoResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/undo.ts") + const pathsResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/paths.ts") + const undoModulePath = undoResolution.modulePath + const pathsModulePath = pathsResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(undoModulePath) || !existsSync(pathsModulePath)) { + // For subprocess args we use the undo resolution (both modules share the same compiled-vs-source state) + if (!undoResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(undoModulePath) || !existsSync(pathsModulePath))) { throw new Error( `undo service modules not found; checked=${resolveTsLoader},${undoModulePath},${pathsModulePath}`, ) } + if (undoResolution.useCompiledJs && (!existsSync(undoModulePath) || !existsSync(pathsModulePath))) { + throw new Error(`undo service modules not found; checked=${undoModulePath},${pathsModulePath}`) + } const script = [ 'const { pathToFileURL } = await import("node:url");', @@ -151,23 +153,20 @@ export async function executeUndo(projectCwdOverride?: string): Promise 0) {', - ' const { execSync } = await import("node:child_process");', + ' const { execFileSync } = await 
import("node:child_process");', ' for (const sha of commits.reverse()) {', - ' try { execSync(`git revert --no-commit ${sha}`, { cwd: basePath, stdio: "pipe" }); commitsReverted++; }', - ' catch { try { execSync("git revert --abort", { cwd: basePath, stdio: "pipe" }); } catch {} break; }', + ' try { execFileSync("git", ["revert", "--no-commit", sha], { cwd: basePath, stdio: "pipe" }); commitsReverted++; }', + ' catch { try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, stdio: "pipe" }); } catch {} break; }', ' }', ' }', '}', - // Remove the entry from completed-units.json 'entries.pop();', 'writeFileSync(completedPath, JSON.stringify(entries, null, 2), "utf-8");', 'const results = [`Undone: ${unitType} (${unitId})`];', @@ -177,14 +176,13 @@ export async function executeUndo(projectCwdOverride?: string): Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/src/web/visualizer-service.ts b/src/web/visualizer-service.ts index d0b255343..93b1fcdd0 100644 --- a/src/web/visualizer-service.ts +++ b/src/web/visualizer-service.ts @@ -4,7 +4,7 @@ import { join } from "node:path" import { pathToFileURL } from "node:url" import { resolveBridgeRuntimeConfig } from "./bridge-service.ts" -import { resolveTypeStrippingFlag } from "./ts-subprocess-flags.ts" +import { resolveTypeStrippingFlag, resolveSubprocessModule, buildSubprocessPrefixArgs } from "./ts-subprocess-flags.ts" const VISUALIZER_MAX_BUFFER = 2 * 1024 * 1024 const VISUALIZER_MODULE_ENV = "GSD_VISUALIZER_MODULE" @@ -35,10 +35,6 @@ export interface SerializedVisualizerData { changelog: unknown } -function resolveVisualizerModulePath(packageRoot: string): string { - return join(packageRoot, "src", "resources", "extensions", "gsd", "visualizer-data.ts") -} - function resolveTsLoaderPath(packageRoot: string): string { return join(packageRoot, "src", "resources", "extensions", "gsd", "tests", "resolve-ts.mjs") } @@ -54,13 +50,17 @@ export async function collectVisualizerData(projectCwdOverride?: string): Promis const { packageRoot, projectCwd } = config const resolveTsLoader = resolveTsLoaderPath(packageRoot) - const visualizerModulePath = resolveVisualizerModulePath(packageRoot) + const moduleResolution = resolveSubprocessModule(packageRoot, "resources/extensions/gsd/visualizer-data.ts") + const visualizerModulePath = moduleResolution.modulePath - if (!existsSync(resolveTsLoader) || !existsSync(visualizerModulePath)) { + if (!moduleResolution.useCompiledJs && (!existsSync(resolveTsLoader) || !existsSync(visualizerModulePath))) { throw new Error( `visualizer data provider not found; checked=${resolveTsLoader},${visualizerModulePath}`, ) } + if (moduleResolution.useCompiledJs && !existsSync(visualizerModulePath)) { + throw new Error(`visualizer data provider not found; checked=${visualizerModulePath}`) + } // The child script loads the upstream module, calls loadVisualizerData(), // converts Map fields to Records, and writes JSON to stdout. 
@@ -80,14 +80,13 @@ export async function collectVisualizerData(projectCwdOverride?: string): Promis 'process.stdout.write(JSON.stringify(result));', ].join(" ") + const prefixArgs = buildSubprocessPrefixArgs(packageRoot, moduleResolution, pathToFileURL(resolveTsLoader).href) + return await new Promise((resolveResult, reject) => { execFile( process.execPath, [ - "--import", - pathToFileURL(resolveTsLoader).href, - resolveTypeStrippingFlag(packageRoot), - "--input-type=module", + ...prefixArgs, "--eval", script, ], diff --git a/web/app/api/switch-root/route.ts b/web/app/api/switch-root/route.ts new file mode 100644 index 000000000..900023bbe --- /dev/null +++ b/web/app/api/switch-root/route.ts @@ -0,0 +1,109 @@ +import { existsSync, readFileSync, statSync, writeFileSync, mkdirSync } from "node:fs"; +import { dirname, resolve } from "node:path"; +import { homedir } from "node:os"; +import { webPreferencesPath } from "../../../../src/app-paths.ts"; +import { discoverProjects } from "../../../../src/web/project-discovery-service.ts"; + +export const runtime = "nodejs"; +export const dynamic = "force-dynamic"; + +/** Shape of persisted web preferences. */ +interface WebPreferences { + devRoot?: string; + lastActiveProject?: string; +} + +/** Expand leading `~/` to the user's home directory. */ +function expandTilde(p: string): string { + if (p === "~") return homedir(); + if (p.startsWith("~/")) return homedir() + p.slice(1); + return p; +} + +/** + * POST /api/switch-root + * + * Validates the new root path, persists it as the `devRoot` preference, + * and returns the discovered projects under the new root. + * + * Request body: { "devRoot": "/absolute/path" } + * Response: { "devRoot": "/resolved/path", "projects": [...] } + */ +export async function POST(request: Request): Promise { + try { + const body = (await request.json()) as Record; + const rawDevRoot = typeof body.devRoot === "string" ? 
body.devRoot.trim() : ""; + + if (!rawDevRoot) { + return Response.json( + { error: "Missing devRoot in request body" }, + { status: 400 }, + ); + } + + const expanded = expandTilde(rawDevRoot); + const resolved = resolve(expanded); + + // Validate: path must exist + if (!existsSync(resolved)) { + return Response.json( + { error: `Path does not exist: ${resolved}` }, + { status: 400 }, + ); + } + + // Validate: path must be a directory + try { + const stat = statSync(resolved); + if (!stat.isDirectory()) { + return Response.json( + { error: `Not a directory: ${resolved}` }, + { status: 400 }, + ); + } + } catch { + return Response.json( + { error: `Cannot access path: ${resolved}` }, + { status: 400 }, + ); + } + + // Read existing preferences and merge + let existing: WebPreferences = {}; + try { + if (existsSync(webPreferencesPath)) { + existing = JSON.parse(readFileSync(webPreferencesPath, "utf-8")); + } + } catch { + // Corrupt file — start fresh + } + + const prefs: WebPreferences = { + ...existing, + devRoot: resolved, + // Clear last active project since we're changing the root + lastActiveProject: undefined, + }; + + // Ensure parent directory exists + const dir = dirname(webPreferencesPath); + if (!existsSync(dir)) { + mkdirSync(dir, { recursive: true }); + } + + writeFileSync(webPreferencesPath, JSON.stringify(prefs, null, 2), "utf-8"); + + // Discover projects under the new root + const projects = discoverProjects(resolved, true); + + return Response.json({ + devRoot: resolved, + projects, + }); + } catch (err) { + return Response.json( + { error: `Failed to switch root: ${err instanceof Error ? err.message : String(err)}` }, + { status: 500 }, + ); + } +} diff --git a/web/components/gsd/projects-view.tsx b/web/components/gsd/projects-view.tsx index c9be904a8..69f0fdcd1 100644 --- a/web/components/gsd/projects-view.tsx +++ b/web/components/gsd/projects-view.tsx @@ -317,22 +317,35 @@ export function ProjectsPanel({ const handleDevRootSaved = useCallback( async (newRoot: string) => { - setDevRoot(newRoot) setLoading(true) setError(null) try { - const discovered = await loadProjects(newRoot) - setProjects(discovered) + // Validate path and persist in a single call + const res = await authFetch("/api/switch-root", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ devRoot: newRoot }), + }) + + if (!res.ok) { + const body = await res.json().catch(() => ({})) + throw new Error((body as { error?: string }).error ?? `Request failed (${res.status})`) + } + + const data = await res.json() as { devRoot: string; projects: ProjectMetadata[] } + setDevRoot(data.devRoot) + setProjects(data.projects) } catch (err) { - setError(err instanceof Error ? err.message : "Failed to load projects") + setError(err instanceof Error ? err.message : "Failed to switch project root") } finally { setLoading(false) } }, - [loadProjects], + [], ) const [newProjectOpen, setNewProjectOpen] = useState(false) + const [changeRootOpen, setChangeRootOpen] = useState(false) const workspaceState = useGSDWorkspaceState() const handleProjectCreated = useCallback( @@ -468,11 +481,19 @@ export function ProjectsPanel({

          Projects

          {devRoot && !loading && (
-
-            {devRoot}
-            ·
-            {projects.length} project{projects.length !== 1 ? "s" : ""}
-
+
+            {devRoot}
+
+            ·
+            {projects.length} project{projects.length !== 1 ? "s" : ""}
+
          )}
+
+          )}
+          {/* Filter + count */}
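// Illustrative sketch (not part of the patch): the request/response contract that
// handleDevRootSaved() above relies on when it POSTs to /api/switch-root. The
// SwitchRootRequest / SwitchRootSuccess / SwitchRootFailure type names are
// hypothetical; the field shapes come from the route handler added in
// web/app/api/switch-root/route.ts.
interface SwitchRootRequest {
  devRoot: string // absolute path, or "~/..." (the route expands a leading tilde)
}

interface SwitchRootSuccess {
  devRoot: string     // resolved absolute path persisted as the devRoot preference
  projects: unknown[] // result of discoverProjects() under the new root
}

interface SwitchRootFailure {
  error: string // e.g. "Path does not exist: /no/such/dir" (HTTP 400) or a 500 message
}

// Typical exchange, assuming an authFetch wrapper like the one used in projects-view.tsx:
async function switchRoot(authFetch: typeof fetch, devRoot: string): Promise<SwitchRootSuccess> {
  const res = await authFetch("/api/switch-root", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ devRoot } satisfies SwitchRootRequest),
  })
  if (!res.ok) {
    const body = (await res.json().catch(() => ({}))) as Partial<SwitchRootFailure>
    throw new Error(body.error ?? `Request failed (${res.status})`)
  }
  return (await res.json()) as SwitchRootSuccess
}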

@@ -1240,8 +1297,31 @@ export function ProjectSelectionGate() {
          )}

        )}
+
+       {/* Change root for "no projects" and "no devRoot" states */}
+       {devRoot && !loading && sortedProjects.length === 0 && !error && (
+
+       )}
+
+       {/* Folder picker for changing dev root */}
+         void handleDevRootSaved(path)}
+         initialPath={devRoot}
+       />
     )
   }
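// Illustrative sketch (not part of the patch): the preference-merge behavior of the new
// switch-root route. The paths and the prior file contents below are hypothetical; the
// merge and clearing logic comes from the handler above. Note that JSON.stringify drops
// the `lastActiveProject: undefined` override, so switching roots removes the previously
// persisted last-active project rather than writing a null value.
const existing = { devRoot: "/old/root", lastActiveProject: "my-app" }
const resolved = "/home/lex/dev" // "~/dev" after tilde expansion and resolve()
const next = { ...existing, devRoot: resolved, lastActiveProject: undefined }
JSON.stringify(next, null, 2)
// => '{\n  "devRoot": "/home/lex/dev"\n}'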