merge: Resolve conflicts with origin/main
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
commit d1c948086e
143 changed files with 4526 additions and 1073 deletions
3  .gitignore  vendored
@@ -1,4 +1,7 @@
# ── Compiled test output ──
dist-test/

# ── GSD project state (development-only, lives in worktree branches) ──
package-lock.json
.claude/
67  CHANGELOG.md
@@ -6,6 +6,67 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
## [Unreleased]

## [2.56.0] - 2026-03-27

### Added
- **parallel**: /gsd parallel watch — native TUI overlay for worker monitoring (#2806)

### Fixed
- **ci**: copy web/components to dist-test for xterm-theme test (#2891)
- **gsd**: prefer PREFERENCES.md in worktrees (#2796)
- **gsd**: resume auto-mode after transient provider pause (#2822)
- **parallel**: resolve session lock contention and 3 related parallel-mode bugs (#2184) (#2800)
- **web**: improve light theme terminal contrast (#2819)
- **gsd**: preserve auto start model through discuss (#2837)

### Changed
- **test**: compile unit tests with esbuild, reclassify integration tests, fix node_modules symlink (#2809)

## [2.55.0] - 2026-03-27

### Added
- colorized headless verbose output with thinking, phases, cost, and durations (#2886)
- headless text mode observability + skip UAT pause (#2867)

### Fixed
- **cli**: let gsd update bypass version mismatch gate (#2845)
- **contracts**: add isWorkspaceEvent guard + close routeLiveInteractionEvent exhaustiveness gap (#2878)
- **gsd**: use project root for prior-slice dispatch guard (#2863)
- **gsd**: include queue context in milestone planning prompts (#2846)
- detect monorepo roots in project discovery to prevent workspace fragmentation (#2849)
- **bg-shell**: recover from deleted cwd in timers (#2850)
- **gsd**: enable dynamic routing without models section (#2851)
- **interactive**: fully remove providers from /providers (#2852)

## [2.54.0] - 2026-03-27

### Added
- Headless Integration Hardening & Release (M002) (#2811)
- **parallel**: add real-time TUI monitor dashboard with self-healing (#2799)

## [2.53.0] - 2026-03-27

### Added
- **vscode**: activity feed, workflow controls, session forking, enhanced code lens [2/3] (#2656)
- **gsd**: enable safety mechanisms by default (snapshots, pre-merge checks) (#2678)

### Fixed
- hydrate collected secrets for current session (#2788)
- resolve stash pop conflicts and stop swallowing merge errors (#2780)
- treat any extracted verdict as terminal in isValidationTerminal (#2774)
- use localStorage for auth token to enable multi-tab usage (#2785)
- guard activeMilestone.id access in discuss and headless paths (#2776)
- clean up zombie parallel workers stuck in error state (#2782)
- relax milestone validation gate to accept prose evidence (#2779)
- write milestone reports to project root instead of worktree (#2778)
- auto-resolve build artifact conflicts in milestone merge (#2777)
- let rate-limit errors attempt model fallback before pausing (#2775)
- prevent gsd next from self-killing via stale crash lock (#2784)
- add shell flag for Windows spawn in VSCode extension (#2781)

### Changed
- **gsd**: extract duplicated status guards and validation helpers (#2767)

## [2.52.0] - 2026-03-27

### Added

@@ -2050,7 +2111,11 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
### Changed
- License updated to MIT

[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...HEAD
[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.56.0...HEAD
[2.56.0]: https://github.com/gsd-build/gsd-2/compare/v2.55.0...v2.56.0
[2.55.0]: https://github.com/gsd-build/gsd-2/compare/v2.54.0...v2.55.0
[2.54.0]: https://github.com/gsd-build/gsd-2/compare/v2.53.0...v2.54.0
[2.53.0]: https://github.com/gsd-build/gsd-2/compare/v2.52.0...v2.53.0
[2.52.0]: https://github.com/gsd-build/gsd-2/compare/v2.51.0...v2.52.0
[2.51.0]: https://github.com/gsd-build/gsd-2/compare/v2.50.0...v2.51.0
[2.50.0]: https://github.com/gsd-build/gsd-2/compare/v2.49.0...v2.50.0
@@ -1,6 +1,6 @@
{
  "name": "@gsd-build/engine-darwin-arm64",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD native engine binary for macOS ARM64",
  "os": [
    "darwin"

@@ -1,6 +1,6 @@
{
  "name": "@gsd-build/engine-darwin-x64",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD native engine binary for macOS Intel",
  "os": [
    "darwin"

@@ -1,6 +1,6 @@
{
  "name": "@gsd-build/engine-linux-arm64-gnu",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD native engine binary for Linux ARM64 (glibc)",
  "os": [
    "linux"

@@ -1,6 +1,6 @@
{
  "name": "@gsd-build/engine-linux-x64-gnu",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD native engine binary for Linux x64 (glibc)",
  "os": [
    "linux"

@@ -1,6 +1,6 @@
{
  "name": "@gsd-build/engine-win32-x64-msvc",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD native engine binary for Windows x64 (MSVC)",
  "os": [
    "win32"
314  package-lock.json  generated
@@ -1243,155 +1243,6 @@
|
|||
"sisteransi": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/builders": {
|
||||
"version": "1.14.0",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/builders/-/builders-1.14.0.tgz",
|
||||
"integrity": "sha512-7pVKxVWkeLUtrTo9nTYkjRcJk0Hlms6lYervXAD7E7+K5lil9ms2JrEB1TalMiHvQMh7h1HJZ4fCJa0/vHpl4w==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@discordjs/formatters": "^0.6.2",
|
||||
"@discordjs/util": "^1.2.0",
|
||||
"@sapphire/shapeshift": "^4.0.0",
|
||||
"discord-api-types": "^0.38.40",
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"ts-mixer": "^6.0.4",
|
||||
"tslib": "^2.6.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.11.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/collection": {
|
||||
"version": "1.5.3",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/collection/-/collection-1.5.3.tgz",
|
||||
"integrity": "sha512-SVb428OMd3WO1paV3rm6tSjM4wC+Kecaa1EUGX7vc6/fddvw/6lg90z4QtCqm21zvVe92vMMDt9+DkIvjXImQQ==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=16.11.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/formatters": {
|
||||
"version": "0.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/formatters/-/formatters-0.6.2.tgz",
|
||||
"integrity": "sha512-y4UPwWhH6vChKRkGdMB4odasUbHOUwy7KL+OVwF86PvT6QVOwElx+TiI1/6kcmcEe+g5YRXJFiXSXUdabqZOvQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"discord-api-types": "^0.38.33"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.11.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/rest": {
|
||||
"version": "2.6.1",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/rest/-/rest-2.6.1.tgz",
|
||||
"integrity": "sha512-wwQdgjeaoYFiaG+atbqx6aJDpqW7JHAo0HrQkBTbYzM3/PJ3GweQIpgElNcGZ26DCUOXMyawYd0YF7vtr+fZXg==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@discordjs/collection": "^2.1.1",
|
||||
"@discordjs/util": "^1.2.0",
|
||||
"@sapphire/async-queue": "^1.5.3",
|
||||
"@sapphire/snowflake": "^3.5.5",
|
||||
"@vladfrangu/async_event_emitter": "^2.4.6",
|
||||
"discord-api-types": "^0.38.40",
|
||||
"magic-bytes.js": "^1.13.0",
|
||||
"tslib": "^2.6.3",
|
||||
"undici": "6.24.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/rest/node_modules/@discordjs/collection": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/collection/-/collection-2.1.1.tgz",
|
||||
"integrity": "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/rest/node_modules/@sapphire/snowflake": {
|
||||
"version": "3.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@sapphire/snowflake/-/snowflake-3.5.5.tgz",
|
||||
"integrity": "sha512-xzvBr1Q1c4lCe7i6sRnrofxeO1QTP/LKQ6A6qy0iB4x5yfiSfARMEQEghojzTNALDTcv8En04qYNIco9/K9eZQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=v14.0.0",
|
||||
"npm": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/rest/node_modules/undici": {
|
||||
"version": "6.24.1",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.24.1.tgz",
|
||||
"integrity": "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/util": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/util/-/util-1.2.0.tgz",
|
||||
"integrity": "sha512-3LKP7F2+atl9vJFhaBjn4nOaSWahZ/yWjOvA4e5pnXkt2qyXRCHLxoBQy81GFtLGCq7K9lPm9R517M1U+/90Qg==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"discord-api-types": "^0.38.33"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/ws": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/ws/-/ws-1.2.3.tgz",
|
||||
"integrity": "sha512-wPlQDxEmlDg5IxhJPuxXr3Vy9AjYq5xCvFWGJyD7w7Np8ZGu+Mc+97LCoEc/+AYCo2IDpKioiH0/c/mj5ZR9Uw==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@discordjs/collection": "^2.1.0",
|
||||
"@discordjs/rest": "^2.5.1",
|
||||
"@discordjs/util": "^1.1.0",
|
||||
"@sapphire/async-queue": "^1.5.2",
|
||||
"@types/ws": "^8.5.10",
|
||||
"@vladfrangu/async_event_emitter": "^2.2.4",
|
||||
"discord-api-types": "^0.38.1",
|
||||
"tslib": "^2.6.2",
|
||||
"ws": "^8.17.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.11.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@discordjs/ws/node_modules/@discordjs/collection": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@discordjs/collection/-/collection-2.1.1.tgz",
|
||||
"integrity": "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/@electron/get": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@electron/get/-/get-2.0.3.tgz",
|
||||
|
|
@ -1899,10 +1750,6 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@gsd-build/daemon": {
|
||||
"resolved": "packages/daemon",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@gsd-build/engine-darwin-arm64": {
|
||||
"version": "2.10.5",
|
||||
"resolved": "https://registry.npmjs.org/@gsd-build/engine-darwin-arm64/-/engine-darwin-arm64-2.10.5.tgz",
|
||||
|
|
@ -3214,39 +3061,6 @@
|
|||
],
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/@sapphire/async-queue": {
|
||||
"version": "1.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@sapphire/async-queue/-/async-queue-1.5.5.tgz",
|
||||
"integrity": "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=v14.0.0",
|
||||
"npm": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@sapphire/shapeshift": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@sapphire/shapeshift/-/shapeshift-4.0.0.tgz",
|
||||
"integrity": "sha512-d9dUmWVA7MMiKobL3VpLF8P2aeanRTu6ypG2OIaEv/ZHH/SUQ2iHOVyi5wAPjQ+HmnMuL0whK9ez8I/raWbtIg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"lodash": "^4.17.21"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=v16"
|
||||
}
|
||||
},
|
||||
"node_modules/@sapphire/snowflake": {
|
||||
"version": "3.5.3",
|
||||
"resolved": "https://registry.npmjs.org/@sapphire/snowflake/-/snowflake-3.5.3.tgz",
|
||||
"integrity": "sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=v14.0.0",
|
||||
"npm": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@silvia-odwyer/photon-node": {
|
||||
"version": "0.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@silvia-odwyer/photon-node/-/photon-node-0.3.4.tgz",
|
||||
|
|
@ -4427,15 +4241,6 @@
|
|||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/ws": {
|
||||
"version": "8.18.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
|
||||
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/yauzl": {
|
||||
"version": "2.10.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
|
||||
|
|
@ -4467,16 +4272,6 @@
|
|||
"vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@vladfrangu/async_event_emitter": {
|
||||
"version": "2.4.7",
|
||||
"resolved": "https://registry.npmjs.org/@vladfrangu/async_event_emitter/-/async_event_emitter-2.4.7.tgz",
|
||||
"integrity": "sha512-Xfe6rpCTxSxfbswi/W/Pz7zp1WWSNn4A0eW4mLkQUewCrXXtMj31lCg+iQyTkh/CkusZSq9eDflu7tjEDXUY6g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=v14.0.0",
|
||||
"npm": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/accepts": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz",
|
||||
|
|
@ -5205,51 +5000,6 @@
|
|||
"node": ">=0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/discord-api-types": {
|
||||
"version": "0.38.42",
|
||||
"resolved": "https://registry.npmjs.org/discord-api-types/-/discord-api-types-0.38.42.tgz",
|
||||
"integrity": "sha512-qs1kya7S84r5RR8m9kgttywGrmmoHaRifU1askAoi+wkoSefLpZP6aGXusjNw5b0jD3zOg3LTwUa3Tf2iHIceQ==",
|
||||
"license": "MIT",
|
||||
"workspaces": [
|
||||
"scripts/actions/documentation"
|
||||
]
|
||||
},
|
||||
"node_modules/discord.js": {
|
||||
"version": "14.25.1",
|
||||
"resolved": "https://registry.npmjs.org/discord.js/-/discord.js-14.25.1.tgz",
|
||||
"integrity": "sha512-2l0gsPOLPs5t6GFZfQZKnL1OJNYFcuC/ETWsW4VtKVD/tg4ICa9x+jb9bkPffkMdRpRpuUaO/fKkHCBeiCKh8g==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@discordjs/builders": "^1.13.0",
|
||||
"@discordjs/collection": "1.5.3",
|
||||
"@discordjs/formatters": "^0.6.2",
|
||||
"@discordjs/rest": "^2.6.0",
|
||||
"@discordjs/util": "^1.2.0",
|
||||
"@discordjs/ws": "^1.2.3",
|
||||
"@sapphire/snowflake": "3.5.3",
|
||||
"discord-api-types": "^0.38.33",
|
||||
"fast-deep-equal": "3.1.3",
|
||||
"lodash.snakecase": "4.1.1",
|
||||
"magic-bytes.js": "^1.10.0",
|
||||
"tslib": "^2.6.3",
|
||||
"undici": "6.21.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/discordjs/discord.js?sponsor"
|
||||
}
|
||||
},
|
||||
"node_modules/discord.js/node_modules/undici": {
|
||||
"version": "6.21.3",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz",
|
||||
"integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
}
|
||||
},
|
||||
"node_modules/dunder-proto": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||
|
|
@ -6921,18 +6671,6 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/lodash": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
|
||||
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/lodash.snakecase": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz",
|
||||
"integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/long": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
|
||||
|
|
@ -6958,12 +6696,6 @@
|
|||
"node": "20 || >=22"
|
||||
}
|
||||
},
|
||||
"node_modules/magic-bytes.js": {
|
||||
"version": "1.13.0",
|
||||
"resolved": "https://registry.npmjs.org/magic-bytes.js/-/magic-bytes.js-1.13.0.tgz",
|
||||
"integrity": "sha512-afO2mnxW7GDTXMm5/AoN1WuOcdoKhtgXjIvHmobqTD1grNplhGdv3PFOyjCVmrnOZBIT/gD/koDKpYG+0mvHcg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/magic-string": {
|
||||
"version": "0.30.21",
|
||||
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
|
||||
|
|
@ -8440,12 +8172,6 @@
|
|||
"integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/ts-mixer": {
|
||||
"version": "6.0.4",
|
||||
"resolved": "https://registry.npmjs.org/ts-mixer/-/ts-mixer-6.0.4.tgz",
|
||||
"integrity": "sha512-ufKpbmrugz5Aou4wcr5Wc1UUFWOLhq+Fm6qa6P0w0K5Qw2yhaUoiWszhCVuNQyNwrlGiscHOmqYoAox1PtvgjA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.8.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||
|
|
@ -9423,46 +9149,6 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"packages/daemon": {
|
||||
"name": "@gsd-build/daemon",
|
||||
"version": "0.1.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.52.0",
|
||||
"@gsd-build/rpc-client": "^2.52.0",
|
||||
"discord.js": "^14.25.1",
|
||||
"yaml": "^2.8.0",
|
||||
"zod": "^3.24.0"
|
||||
},
|
||||
"bin": {
|
||||
"gsd-daemon": "dist/cli.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.12.0",
|
||||
"typescript": "^5.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=22.0.0"
|
||||
}
|
||||
},
|
||||
"packages/daemon/node_modules/@anthropic-ai/sdk": {
|
||||
"version": "0.52.0",
|
||||
"resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.52.0.tgz",
|
||||
"integrity": "sha512-d4c+fg+xy9e46c8+YnrrgIQR45CZlAi7PwdzIfDXDM6ACxEZli1/fxhURsq30ZpMZy6LvSkr41jGq5aF5TD7rQ==",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"anthropic-ai-sdk": "bin/cli"
|
||||
}
|
||||
},
|
||||
"packages/daemon/node_modules/zod": {
|
||||
"version": "3.25.76",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
|
||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
},
|
||||
"packages/mcp-server": {
|
||||
"name": "@gsd-build/mcp-server",
|
||||
"version": "2.52.0",
|
||||
|
|
|
10  package.json
@@ -1,6 +1,6 @@
{
  "name": "gsd-pi",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "GSD — Get Shit Done coding agent",
  "license": "MIT",
  "repository": {
@@ -53,11 +53,12 @@
    "copy-resources": "node scripts/copy-resources.cjs",
    "copy-themes": "node scripts/copy-themes.cjs",
    "copy-export-html": "node scripts/copy-export-html.cjs",
    "test:unit": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts",
    "test:compile": "node scripts/compile-tests.mjs",
    "test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test 'dist-test/src/tests/*.test.js' 'dist-test/src/resources/extensions/gsd/tests/*.test.js' 'dist-test/src/resources/extensions/gsd/tests/*.test.mjs' 'dist-test/src/resources/extensions/shared/tests/*.test.js' 'dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js' 'dist-test/src/resources/extensions/github-sync/tests/*.test.js' 'dist-test/src/resources/extensions/universal-config/tests/*.test.js' 'dist-test/src/resources/extensions/voice/tests/*.test.js'",
    "test:packages": "node --test packages/pi-coding-agent/dist/core/*.test.js",
    "test:marketplace": "GSD_TEST_CLONE_MARKETPLACES=1 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/gsd/tests/claude-import-tui.test.ts src/resources/extensions/gsd/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts",
    "test:coverage": "c8 --reporter=text --reporter=lcov --exclude='src/resources/extensions/gsd/tests/**' --exclude='src/tests/**' --exclude='scripts/**' --exclude='native/**' --exclude='node_modules/**' --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts",
    "test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*integration*.test.ts src/tests/integration/*.test.ts",
    "test:coverage": "c8 --reporter=text --reporter=lcov --exclude='src/resources/extensions/gsd/tests/**' --exclude='src/tests/**' --exclude='scripts/**' --exclude='native/**' --exclude='node_modules/**' --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts",
    "test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test 'src/tests/integration/*.test.ts' 'src/resources/extensions/gsd/tests/integration/*.test.ts' 'src/resources/extensions/async-jobs/*.test.ts' 'src/resources/extensions/browser-tools/tests/*.test.mjs'",
    "pretest": "npm run typecheck:extensions",
    "test": "npm run test:unit && npm run test:integration",
    "test:smoke": "node --experimental-strip-types tests/smoke/run.ts",
@@ -136,6 +137,7 @@
    "@types/node": "^24.12.0",
    "@types/picomatch": "^4.0.2",
    "c8": "^11.0.0",
    "esbuild": "^0.25.12",
    "jiti": "^2.6.1",
    "typescript": "^5.4.0"
  },
@@ -1,6 +1,6 @@
{
  "name": "@gsd/pi-coding-agent",
  "version": "2.52.0",
  "version": "2.56.0",
  "description": "Coding agent CLI (vendored from pi-mono)",
  "type": "module",
  "piConfig": {
@@ -235,7 +235,7 @@ export class ModelRegistry {

  constructor(
    readonly authStorage: AuthStorage,
    private modelsJsonPath: string | undefined = join(getAgentDir(), "models.json"),
    readonly modelsJsonPath: string | undefined = join(getAgentDir(), "models.json"),
  ) {
    this.discoveryCache = new ModelDiscoveryCache();
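Aside on the constructor change above: in TypeScript, a parameter-property modifier also controls visibility, so switching `private` to `readonly` turns `modelsJsonPath` into a public, read-only instance property that other components can consume, which is what the provider manager below relies on when constructing ModelsJsonWriter. A minimal sketch, not taken from the repo:

    // Illustration only: how parameter-property modifiers differ.
    class Registry {
      constructor(
        private hiddenPath: string,   // usable only inside Registry
        readonly visiblePath: string, // public, read-only property on instances
      ) {}
    }

    const r = new Registry("/tmp/a.json", "/tmp/b.json");
    // r.hiddenPath;            // compile error: 'hiddenPath' is private
    console.log(r.visiblePath); // "/tmp/b.json", safe for consumers to read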
@@ -14,6 +14,7 @@ import {
import type { AuthStorage } from "../../../core/auth-storage.js";
import { getDiscoverableProviders } from "../../../core/model-discovery.js";
import type { ModelRegistry } from "../../../core/model-registry.js";
import { ModelsJsonWriter } from "../../../core/models-json-writer.js";
import { theme } from "../theme/theme.js";
import { rawKeyHint } from "./keybinding-hints.js";

@@ -39,6 +40,7 @@ export class ProviderManagerComponent extends Container implements Focusable {
  private tui: TUI;
  private authStorage: AuthStorage;
  private modelRegistry: ModelRegistry;
  private modelsJsonWriter: ModelsJsonWriter;
  private onDone: () => void;
  private onDiscover: (provider: string) => void;

@@ -54,6 +56,7 @@ export class ProviderManagerComponent extends Container implements Focusable {
    this.tui = tui;
    this.authStorage = authStorage;
    this.modelRegistry = modelRegistry;
    this.modelsJsonWriter = new ModelsJsonWriter(this.modelRegistry.modelsJsonPath);
    this.onDone = onDone;
    this.onDiscover = onDiscover;

@@ -64,7 +67,7 @@ export class ProviderManagerComponent extends Container implements Focusable {
    // Hints
    const hints = [
      rawKeyHint("d", "discover"),
      rawKeyHint("r", "remove auth"),
      rawKeyHint("r", "remove"),
      rawKeyHint("esc", "close"),
    ].join(" ");
    this.addChild(new Text(hints, 0, 0));

@@ -102,6 +105,15 @@ export class ProviderManagerComponent extends Container implements Focusable {
      supportsDiscovery: discoverableSet.has(name),
      modelCount: providerModelCounts.get(name) ?? 0,
    }));
    this.clampSelectedIndex();
  }

  private clampSelectedIndex(): void {
    if (this.providers.length === 0) {
      this.selectedIndex = 0;
      return;
    }
    this.selectedIndex = Math.min(this.selectedIndex, this.providers.length - 1);
  }

  private updateList(): void {

@@ -152,8 +164,10 @@ export class ProviderManagerComponent extends Container implements Focusable {
      }
    } else if (keyData === "r" || keyData === "R") {
      const provider = this.providers[this.selectedIndex];
      if (provider?.hasAuth) {
      if (provider) {
        this.authStorage.remove(provider.name);
        this.modelsJsonWriter.removeProvider(provider.name);
        this.modelRegistry.refresh();
        this.loadProviders();
        this.updateList();
        this.tui.requestRender();
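Aside: the clampSelectedIndex helper added above exists so that removing the last provider in the list cannot leave the cursor pointing past the end. A minimal standalone sketch of the same pattern (illustration only, not repo code):

    // Keep a selection index valid after items are removed.
    function clampIndex(selected: number, length: number): number {
      if (length === 0) return 0;
      return Math.min(selected, length - 1);
    }

    const providers = ["anthropic", "openai", "groq"];
    let selectedIndex = 2;      // last entry is selected
    providers.splice(2, 1);     // user presses "r" and removes it
    selectedIndex = clampIndex(selectedIndex, providers.length);
    console.log(selectedIndex); // 1, still a valid index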
@@ -1,6 +1,6 @@
{
  "name": "@glittercowboy/gsd",
  "version": "2.52.0",
  "version": "2.56.0",
  "piConfig": {
    "name": "gsd",
    "configDir": ".gsd"
217  scripts/compile-tests.mjs  Normal file
@@ -0,0 +1,217 @@
#!/usr/bin/env node
/**
 * Compile all TypeScript source + test files to dist-test/ using esbuild.
 * Run compiled JS directly with node --test (no per-file TS overhead).
 *
 * Usage: node scripts/compile-tests.mjs
 */

import { cp, mkdir, readdir, readFile, writeFile } from 'node:fs/promises';
import { existsSync, symlinkSync } from 'node:fs';
import { createRequire } from 'node:module';
import { join } from 'node:path';
import { fileURLToPath } from 'node:url';

const __dirname = fileURLToPath(new URL('.', import.meta.url));
const ROOT = join(__dirname, '..');

const require = createRequire(import.meta.url);
const esbuild = require(join(ROOT, 'node_modules/esbuild'));

// Recursively collect files by extension (skip node_modules, templates, etc.)
// Directories to skip during file collection
const SKIP_DIRS = new Set(['node_modules', 'templates', '__tests__', 'integration']);

async function collectFiles(dir, exts = ['.ts', '.mjs']) {
  const results = [];
  let entries;
  try {
    entries = await readdir(dir, { withFileTypes: true });
  } catch {
    return results;
  }
  for (const entry of entries) {
    if (SKIP_DIRS.has(entry.name)) continue;
    const full = join(dir, entry.name);
    if (entry.isDirectory()) {
      results.push(...await collectFiles(full, exts));
    } else if (
      exts.some(ext => entry.name.endsWith(ext)) &&
      !entry.name.endsWith('.d.ts')
    ) {
      results.push(full);
    }
  }
  return results;
}

// Dirs to skip when copying assets (node_modules are never useful in dist-test)
const ASSET_SKIP_DIRS = new Set(['node_modules', '__tests__', 'integration']);

/**
 * Recursively copy files from srcDir to destDir.
 * Skips node_modules only. Copies everything: .ts/.tsx originals (for jiti),
 * .mjs helpers, .md/.yaml/.json assets, etc.
 * esbuild compiled .js output already lands in dist-test, so we just
 * overlay the asset files on top.
 */
async function copyAssets(srcDir, destDir) {
  let entries;
  try {
    entries = await readdir(srcDir, { withFileTypes: true });
  } catch {
    return; // directory doesn't exist, nothing to copy
  }
  for (const entry of entries) {
    if (ASSET_SKIP_DIRS.has(entry.name)) continue;
    const srcPath = join(srcDir, entry.name);
    const destPath = join(destDir, entry.name);
    if (entry.isDirectory()) {
      await copyAssets(srcPath, destPath);
    } else {
      await mkdir(destDir, { recursive: true });
      await cp(srcPath, destPath, { force: true });
    }
  }
}

async function main() {
  const start = Date.now();

  // Collect entry points from src/ and packages/*/src/
  const srcFiles = await collectFiles(join(ROOT, 'src'));

  const packagesDir = join(ROOT, 'packages');
  const pkgEntries = await readdir(packagesDir, { withFileTypes: true });
  const packageFiles = [];
  for (const entry of pkgEntries) {
    if (!entry.isDirectory()) continue;
    const pkgSrc = join(packagesDir, entry.name, 'src');
    packageFiles.push(...await collectFiles(pkgSrc));
  }

  // Also compile web/lib/ — some tests import from ../../web/lib/
  const webLibFiles = await collectFiles(join(ROOT, 'web', 'lib'));

  const entryPoints = [...srcFiles, ...packageFiles, ...webLibFiles];
  console.log(`Compiling ${entryPoints.length} files to dist-test/...`);

  // bundle:false transforms TypeScript but keeps import specifiers verbatim.
  // We post-process the output to rewrite .ts → .js in import strings.
  await esbuild.build({
    entryPoints,
    outdir: join(ROOT, 'dist-test'),
    outbase: ROOT,
    bundle: false,
    format: 'esm',
    platform: 'node',
    target: 'node22',
    sourcemap: 'inline',
    packages: 'external',
    logLevel: 'warning',
  });

  // Copy non-compiled assets from src/ to dist-test/src/ maintaining structure.
  // Tests use import.meta.url to resolve sibling .md, .yaml, .json, .ts etc.
  // Also copy original .ts files — jiti-based imports load .ts source directly.
  const srcDir = join(ROOT, 'src');
  const distSrcDir = join(ROOT, 'dist-test', 'src');
  await copyAssets(srcDir, distSrcDir);
  console.log('Copied non-TS assets and .ts source files to dist-test/src/');

  // Copy packages/*/src/ assets as well
  for (const entry of pkgEntries) {
    if (!entry.isDirectory()) continue;
    const pkgSrc = join(packagesDir, entry.name, 'src');
    const pkgDistSrc = join(ROOT, 'dist-test', 'packages', entry.name, 'src');
    await copyAssets(pkgSrc, pkgDistSrc);
  }

  // Copy web/lib/ assets (tests import from ../../web/lib/ relative to dist-test/src/tests/)
  await copyAssets(join(ROOT, 'web', 'lib'), join(ROOT, 'dist-test', 'web', 'lib'));

  // Copy web/components/ assets (xterm-theme test reads shell-terminal.tsx via import.meta.dirname)
  await copyAssets(join(ROOT, 'web', 'components'), join(ROOT, 'dist-test', 'web', 'components'));

  // Copy scripts/ non-TS files (.cjs etc) — some tests require() scripts directly
  await copyAssets(join(ROOT, 'scripts'), join(ROOT, 'dist-test', 'scripts'));

  // Copy root package.json — some tests read it to check version/engines fields
  await cp(join(ROOT, 'package.json'), join(ROOT, 'dist-test', 'package.json'), { force: true });

  // Copy root dist/ into dist-test/dist/ — some tests compute projectRoot as
  // 3 levels up from dist-test/src/tests/ which lands at dist-test/, then
  // import from dist/mcp-server.js etc.
  const rootDistDir = join(ROOT, 'dist');
  const distTestDistDir = join(ROOT, 'dist-test', 'dist');
  await copyAssets(rootDistDir, distTestDistDir);

  // Post-process: rewrite .ts import specifiers to .js in all compiled JS files.
  // esbuild with bundle:false preserves original specifiers; Node can't load .ts.
  const compiledJsFiles = await collectFiles(join(ROOT, 'dist-test'), ['.js']);
  // Regex matches .ts in from/import() strings but not sourceMappingURL comments
  const tsImportRe = /(from\s+["'])(\.\.?\/[^"']*?)\.ts(["'])/g;
  const tsDynImportRe = /(import\(["'])(\.\.?\/[^"']*?)\.ts(["'])\)/g;

  let rewritten = 0;
  await Promise.all(compiledJsFiles.map(async (file) => {
    const src = await readFile(file, 'utf-8');
    const out = src
      .replace(tsImportRe, (_, a, b, c) => `${a}${b}.js${c}`)
      .replace(tsDynImportRe, (_, a, b, c) => `${a}${b}.js${c})`);
    if (out !== src) {
      await writeFile(file, out, 'utf-8');
      rewritten++;
    }
  }));
  if (rewritten > 0) {
    console.log(`Rewrote .ts → .js imports in ${rewritten} files`);
  }

  // Remove stale compiled test files: dist-test entries whose source no longer exists
  // in a non-integration source directory (e.g. test moved to integration/).
  // Only cleans *.test.js and *.test.ts files to avoid touching non-test outputs.
  const { rm } = await import('node:fs/promises');
  const { existsSync } = await import('node:fs');
  const testDirsToClean = [
    [join(ROOT, 'dist-test', 'src', 'tests'), join(ROOT, 'src', 'tests')],
    [join(ROOT, 'dist-test', 'src', 'resources', 'extensions', 'gsd', 'tests'),
     join(ROOT, 'src', 'resources', 'extensions', 'gsd', 'tests')],
  ];
  let staleCleaned = 0;
  for (const [distDir, srcDir] of testDirsToClean) {
    let distEntries;
    try { distEntries = await readdir(distDir, { withFileTypes: true }); } catch { continue; }
    for (const entry of distEntries) {
      if (!entry.isFile()) continue;
      if (!entry.name.match(/\.test\.(js|ts)$/)) continue;
      const stem = entry.name.replace(/\.(js|ts)$/, '');
      // Source could be .ts or .mjs (esbuild compiles both to .js)
      const hasTsSrc = existsSync(join(srcDir, stem + '.ts'));
      const hasMjsSrc = existsSync(join(srcDir, stem + '.mjs'));
      if (!hasTsSrc && !hasMjsSrc) {
        await rm(join(distDir, entry.name));
        staleCleaned++;
      }
    }
  }
  if (staleCleaned > 0) {
    console.log(`Removed ${staleCleaned} stale compiled test files from dist-test/`);
  }

  // Ensure dist-test/node_modules exists so resource-loader.ts (which computes
  // packageRoot from import.meta.url) resolves gsdNodeModules to a real path.
  // Without this, initResources creates dangling symlinks in test environments.
  const distNodeModules = join(ROOT, 'dist-test', 'node_modules');
  if (!existsSync(distNodeModules)) {
    symlinkSync(join(ROOT, 'node_modules'), distNodeModules);
  }

  const elapsed = ((Date.now() - start) / 1000).toFixed(2);
  console.log(`Done in ${elapsed}s`);
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
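Aside: the two post-processing regexes above only rewrite relative specifiers (./ or ../), so bare package imports are left alone. A minimal sketch of the rewrite applied to a made-up compiled snippet (illustration only, not part of the commit):

    // Illustration: applying the same rewrite to a hypothetical compiled file.
    const tsImportRe = /(from\s+["'])(\.\.?\/[^"']*?)\.ts(["'])/g;
    const tsDynImportRe = /(import\(["'])(\.\.?\/[^"']*?)\.ts(["'])\)/g;

    const compiled = [
      'import { helper } from "../lib/helper.ts";',
      'const lazy = await import("./lazy.ts");',
      'import { z } from "zod";', // bare specifier: untouched
    ].join('\n');

    const rewritten = compiled
      .replace(tsImportRe, (_, a, b, c) => `${a}${b}.js${c}`)
      .replace(tsDynImportRe, (_, a, b, c) => `${a}${b}.js${c})`);

    console.log(rewritten);
    // import { helper } from "../lib/helper.js";
    // const lazy = await import("./lazy.js");
    // import { z } from "zod";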
46  scripts/dist-test-resolve.mjs  Normal file
@@ -0,0 +1,46 @@
/**
 * Minimal Node.js import hook for running tests from dist-test/.
 *
 * esbuild with bundle:false preserves import specifiers verbatim, so compiled
 * .js files still import '../foo.ts'. This hook redirects those to '.js' so
 * Node can find the compiled output.
 *
 * Also redirects @gsd bare imports to their compiled counterparts in dist-test.
 */

import { fileURLToPath, pathToFileURL } from 'node:url';
import { existsSync } from 'node:fs';
import { join } from 'node:path';

// dist-test root — everything compiled lands here
const DIST_TEST = new URL('../dist-test/', import.meta.url).href;

// Absolute paths to compiled @gsd/* entry points
const GSD_ALIASES = {
  '@gsd/pi-coding-agent': new URL('../dist-test/packages/pi-coding-agent/src/index.js', import.meta.url).href,
  '@gsd/pi-ai/oauth': new URL('../dist-test/packages/pi-ai/src/utils/oauth/index.js', import.meta.url).href,
  '@gsd/pi-ai': new URL('../dist-test/packages/pi-ai/src/index.js', import.meta.url).href,
  '@gsd/pi-agent-core': new URL('../dist-test/packages/pi-agent-core/src/index.js', import.meta.url).href,
  '@gsd/pi-tui': new URL('../dist-test/packages/pi-tui/src/index.js', import.meta.url).href,
  '@gsd/native': new URL('../dist-test/packages/native/src/index.js', import.meta.url).href,
};

export function resolve(specifier, context, nextResolve) {
  // 1. @gsd/* bare imports → compiled dist-test counterpart
  if (specifier in GSD_ALIASES) {
    return nextResolve(GSD_ALIASES[specifier], context);
  }

  // 2. .ts relative imports inside dist-test → .js
  if (
    specifier.endsWith('.ts') &&
    (specifier.startsWith('./') || specifier.startsWith('../')) &&
    context.parentURL &&
    context.parentURL.startsWith(DIST_TEST)
  ) {
    const jsSpecifier = specifier.slice(0, -3) + '.js';
    return nextResolve(jsSpecifier, context);
  }

  return nextResolve(specifier, context);
}
@@ -18,25 +18,6 @@ const { existsSync, statSync, readdirSync } = require('fs')
const { resolve, join } = require('path')
const { execSync } = require('child_process')

const root = resolve(__dirname, '..')
const packagesDir = join(root, 'packages')

// Skip if packages/ doesn't exist (published tarball / end-user install)
if (!existsSync(packagesDir)) process.exit(0)

// Skip in CI — the pipeline runs `npm run build` explicitly
if (process.env.CI === 'true' || process.env.CI === '1') process.exit(0)

// Workspace packages that need dist/index.js at runtime.
// Order matters: dependencies must build before dependents.
const WORKSPACE_PACKAGES = [
  'native',
  'pi-tui',
  'pi-ai',
  'pi-agent-core',
  'pi-coding-agent',
]

/**
 * Returns the most recent mtime (ms) of any .ts file under dir, recursively.
 * Returns 0 if no .ts files found.
@@ -56,31 +37,54 @@ function newestSrcMtime(dir) {
  return newest
}

const stale = []
for (const pkg of WORKSPACE_PACKAGES) {
  const distIndex = join(packagesDir, pkg, 'dist', 'index.js')
  if (!existsSync(distIndex)) {
    stale.push(pkg)
    continue
if (require.main === module) {
  const root = resolve(__dirname, '..')
  const packagesDir = join(root, 'packages')

  // Skip if packages/ doesn't exist (published tarball / end-user install)
  if (!existsSync(packagesDir)) process.exit(0)

  // Skip in CI — the pipeline runs `npm run build` explicitly
  if (process.env.CI === 'true' || process.env.CI === '1') process.exit(0)

  // Workspace packages that need dist/index.js at runtime.
  // Order matters: dependencies must build before dependents.
  const WORKSPACE_PACKAGES = [
    'native',
    'pi-tui',
    'pi-ai',
    'pi-agent-core',
    'pi-coding-agent',
  ]

  const stale = []
  for (const pkg of WORKSPACE_PACKAGES) {
    const distIndex = join(packagesDir, pkg, 'dist', 'index.js')
    if (!existsSync(distIndex)) {
      stale.push(pkg)
      continue
    }
    const distMtime = statSync(distIndex).mtimeMs
    const srcMtime = newestSrcMtime(join(packagesDir, pkg, 'src'))
    if (srcMtime > distMtime) {
      stale.push(pkg)
    }
  }
  const distMtime = statSync(distIndex).mtimeMs
  const srcMtime = newestSrcMtime(join(packagesDir, pkg, 'src'))
  if (srcMtime > distMtime) {
    stale.push(pkg)

  if (stale.length === 0) process.exit(0)

  process.stderr.write(` Building ${stale.length} workspace package(s) with stale or missing dist/: ${stale.join(', ')}\n`)

  for (const pkg of stale) {
    const pkgDir = join(packagesDir, pkg)
    try {
      execSync('npm run build', { cwd: pkgDir, stdio: 'pipe' })
      process.stderr.write(` ✓ ${pkg}\n`)
    } catch (err) {
      process.stderr.write(` ✗ ${pkg} build failed: ${err.message}\n`)
      // Non-fatal — the user can run `npm run build` manually
    }
  }
}

if (stale.length === 0) process.exit(0)

process.stderr.write(` Building ${stale.length} workspace package(s) with stale or missing dist/: ${stale.join(', ')}\n`)

for (const pkg of stale) {
  const pkgDir = join(packagesDir, pkg)
  try {
    execSync('npm run build', { cwd: pkgDir, stdio: 'pipe' })
    process.stderr.write(` ✓ ${pkg}\n`)
  } catch (err) {
    process.stderr.write(` ✗ ${pkg} build failed: ${err.message}\n`)
    // Non-fatal — the user can run `npm run build` manually
  }
}
module.exports = { newestSrcMtime }
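Aside: moving the build logic behind the require.main === module guard and exporting newestSrcMtime makes the staleness check loadable without side effects. A minimal sketch of a test that relies on this (illustration only; the script's path is not shown in this diff, so the require path below is a hypothetical placeholder):

    // Illustration only: the build script's path is a made-up placeholder.
    import { test } from 'node:test';
    import assert from 'node:assert';
    import { createRequire } from 'node:module';
    import { mkdtempSync } from 'node:fs';
    import { tmpdir } from 'node:os';
    import { join } from 'node:path';

    const require = createRequire(import.meta.url);

    test('newestSrcMtime returns 0 for a directory with no .ts files', () => {
      // Exported via module.exports = { newestSrcMtime }; requiring no longer triggers builds.
      const { newestSrcMtime } = require('../scripts/build-workspace-packages.cjs');
      const emptyDir = mkdtempSync(join(tmpdir(), 'gsd-mtime-'));
      assert.strictEqual(newestSrcMtime(emptyDir), 0);
    });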
852
scripts/parallel-monitor.mjs
Executable file
852
scripts/parallel-monitor.mjs
Executable file
|
|
@ -0,0 +1,852 @@
|
|||
#!/usr/bin/env node
|
||||
/**
|
||||
* GSD Parallel Worker Monitor
|
||||
*
|
||||
* Real-time TUI dashboard for monitoring parallel GSD auto-mode workers.
|
||||
* Zero dependencies — uses raw ANSI escape codes, Node.js builtins only.
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/parallel-monitor.mjs # live dashboard, 5s refresh
|
||||
* node scripts/parallel-monitor.mjs --interval 3 # faster refresh
|
||||
* node scripts/parallel-monitor.mjs --once # single snapshot, then exit
|
||||
* node scripts/parallel-monitor.mjs --heal # auto-respawn dead workers
|
||||
* node scripts/parallel-monitor.mjs --heal --heal-retries 5 --heal-cooldown 60
|
||||
*
|
||||
* Options:
|
||||
* --interval <sec> Refresh interval in seconds (default: 5)
|
||||
* --once Render once and exit (useful for scripting/piping)
|
||||
* --heal Auto-respawn dead workers (opt-in, off by default)
|
||||
* --heal-retries <n> Max respawn attempts per worker (default: 3)
|
||||
* --heal-cooldown <sec> Seconds between respawn attempts (default: 30)
|
||||
* --dir <path> Status file directory (default: .gsd/parallel)
|
||||
* --root <path> Project root (default: cwd)
|
||||
*
|
||||
* Data sources:
|
||||
* .gsd/parallel/M0xx.status.json — heartbeat, cost, state (written by orchestrator)
|
||||
* .gsd/worktrees/M0xx/.gsd/auto.lock — current unit type + ID (written by worker)
|
||||
* .gsd/worktrees/M0xx/.gsd/gsd.db — task/slice completion (SQLite, queried via cli)
|
||||
* .gsd/parallel/M0xx.stdout.log — NDJSON events (cost extraction, notify messages)
|
||||
* .gsd/parallel/M0xx.stderr.log — error surfacing
|
||||
*
|
||||
* Health indicators:
|
||||
* ● green — PID alive, fresh heartbeat (<30s)
|
||||
* ● green — PID alive, heartbeat stale (respawned worker, file mtime used as proxy)
|
||||
* ○ red — PID dead
|
||||
*
|
||||
* Self-healing (--heal):
|
||||
* When a dead worker is detected, the monitor writes a temp shell script and launches
|
||||
* a new headless auto-mode process in the worker's worktree with the correct env vars.
|
||||
* Cooldown prevents rapid respawn loops. Gives up after --heal-retries consecutive
|
||||
* failures. Resets retry count when a worker comes back alive.
|
||||
*/
|
||||
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
// ─── Configuration ───────────────────────────────────────────────────────────
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const INTERVAL_SEC = parseInt(getArg('--interval', '5'), 10);
|
||||
const PARALLEL_DIR = getArg('--dir', '.gsd/parallel');
|
||||
const PROJECT_ROOT = getArg('--root', process.cwd());
|
||||
const ONE_SHOT = args.includes('--once');
|
||||
const HEAL_MODE = args.includes('--heal');
|
||||
const HEAL_MAX_RETRIES = parseInt(getArg('--heal-retries', '3'), 10);
|
||||
const HEAL_COOLDOWN_SEC = parseInt(getArg('--heal-cooldown', '30'), 10);
|
||||
|
||||
// Per-worker heal state: { lastAttempt: number, retries: number }
|
||||
const healState = {};
|
||||
|
||||
function getArg(flag, defaultVal) {
|
||||
const idx = args.indexOf(flag);
|
||||
return idx !== -1 && args[idx + 1] ? args[idx + 1] : defaultVal;
|
||||
}
|
||||
|
||||
// ─── ANSI Helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
const ESC = '\x1b[';
|
||||
const RESET = `${ESC}0m`;
|
||||
const BOLD = `${ESC}1m`;
|
||||
const DIM = `${ESC}2m`;
|
||||
const ITALIC = `${ESC}3m`;
|
||||
|
||||
const FG = {
|
||||
black: `${ESC}30m`,
|
||||
red: `${ESC}31m`,
|
||||
green: `${ESC}32m`,
|
||||
yellow: `${ESC}33m`,
|
||||
blue: `${ESC}34m`,
|
||||
magenta: `${ESC}35m`,
|
||||
cyan: `${ESC}36m`,
|
||||
white: `${ESC}37m`,
|
||||
gray: `${ESC}90m`,
|
||||
};
|
||||
|
||||
const BG = {
|
||||
black: `${ESC}40m`,
|
||||
red: `${ESC}41m`,
|
||||
green: `${ESC}42m`,
|
||||
yellow: `${ESC}43m`,
|
||||
blue: `${ESC}44m`,
|
||||
white: `${ESC}47m`,
|
||||
};
|
||||
|
||||
// Screen control
|
||||
const CLEAR_SCREEN = `${ESC}2J${ESC}H`;
|
||||
const HIDE_CURSOR = `${ESC}?25l`;
|
||||
const SHOW_CURSOR = `${ESC}?25h`;
|
||||
const SAVE_POS = `${ESC}s`;
|
||||
const RESTORE_POS = `${ESC}u`;
|
||||
|
||||
function moveTo(row, col) { return `${ESC}${row};${col}H`; }
|
||||
|
||||
// ─── Data Reading ────────────────────────────────────────────────────────────
|
||||
|
||||
function readJsonSafe(filePath) {
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(filePath, 'utf-8'));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function isPidAlive(pid) {
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function discoverWorkers() {
|
||||
const dir = path.resolve(PROJECT_ROOT, PARALLEL_DIR);
|
||||
const worktreeDir = path.resolve(PROJECT_ROOT, '.gsd/worktrees');
|
||||
const mids = new Set();
|
||||
|
||||
// From status files
|
||||
if (fs.existsSync(dir)) {
|
||||
for (const f of fs.readdirSync(dir)) {
|
||||
if (f.endsWith('.status.json')) mids.add(f.replace('.status.json', ''));
|
||||
}
|
||||
}
|
||||
|
||||
// From stderr/stdout logs (manually respawned workers may lack status.json)
|
||||
if (fs.existsSync(dir)) {
|
||||
for (const f of fs.readdirSync(dir)) {
|
||||
const m = f.match(/^(M\d+)\.(stderr|stdout)\.log$/);
|
||||
if (m) mids.add(m[1]);
|
||||
}
|
||||
}
|
||||
|
||||
// From worktree directories that have auto.lock (actively running)
|
||||
if (fs.existsSync(worktreeDir)) {
|
||||
for (const d of fs.readdirSync(worktreeDir)) {
|
||||
if (d.startsWith('M') && fs.existsSync(path.join(worktreeDir, d, '.gsd', 'auto.lock'))) {
|
||||
mids.add(d);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return [...mids].sort();
|
||||
}
|
||||
|
||||
function readWorkerStatus(mid) {
|
||||
const statusPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.status.json`);
|
||||
return readJsonSafe(statusPath);
|
||||
}
|
||||
|
||||
function readAutoLock(mid) {
|
||||
const lockPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/auto.lock`);
|
||||
return readJsonSafe(lockPath);
|
||||
}
|
||||
|
||||
function querySliceProgress(mid) {
|
||||
const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`);
|
||||
if (!fs.existsSync(dbPath)) return [];
|
||||
|
||||
try {
|
||||
const sql = `SELECT s.id, s.status, COUNT(t.id), SUM(CASE WHEN t.status='complete' THEN 1 ELSE 0 END) FROM slices s LEFT JOIN tasks t ON s.milestone_id=t.milestone_id AND s.id=t.slice_id WHERE s.milestone_id='${mid}' GROUP BY s.id ORDER BY s.id`;
|
||||
const out = execSync(`sqlite3 "${dbPath}" "${sql}"`, { timeout: 3000, encoding: 'utf-8' }).trim();
|
||||
if (!out) return [];
|
||||
return out.split('\n').map(line => {
|
||||
const [id, status, total, done] = line.split('|');
|
||||
return { id, status, total: parseInt(total, 10), done: parseInt(done || '0', 10) };
|
||||
});
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function readRecentEvents(mid, maxLines = 5) {
|
||||
const stdoutPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`);
|
||||
const notifications = [];
|
||||
const errors = [];
|
||||
|
||||
// Parse NDJSON notify events from stdout log
|
||||
if (fs.existsSync(stdoutPath)) {
|
||||
try {
|
||||
const stat = fs.statSync(stdoutPath);
|
||||
const readSize = Math.min(stat.size, 32768);
|
||||
const fd = fs.openSync(stdoutPath, 'r');
|
||||
const buf = Buffer.alloc(readSize);
|
||||
fs.readSync(fd, buf, 0, readSize, Math.max(0, stat.size - readSize));
|
||||
fs.closeSync(fd);
|
||||
const content = buf.toString('utf-8');
|
||||
const lines = content.trim().split('\n').slice(-100);
|
||||
|
||||
for (const line of lines) {
|
||||
try {
|
||||
const obj = JSON.parse(line);
|
||||
if (obj.method === 'notify' && obj.message) {
|
||||
notifications.push({ ts: Date.now(), msg: obj.message, mid });
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
|
||||
// Parse errors from stderr log — only new bytes since monitor started
|
||||
const stderrPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`);
|
||||
if (fs.existsSync(stderrPath)) {
|
||||
try {
|
||||
const stat = fs.statSync(stderrPath);
|
||||
|
||||
// Record baseline on first read — skip pre-existing errors
|
||||
if (!(mid in stderrBaselines)) {
|
||||
stderrBaselines[mid] = stat.size;
|
||||
}
|
||||
|
||||
const baseline = stderrBaselines[mid];
|
||||
const newBytes = stat.size - baseline;
|
||||
|
||||
if (newBytes > 0) {
|
||||
const readSize = Math.min(newBytes, 4096);
|
||||
const fd = fs.openSync(stderrPath, 'r');
|
||||
const buf = Buffer.alloc(readSize);
|
||||
fs.readSync(fd, buf, 0, readSize, Math.max(baseline, stat.size - readSize));
|
||||
fs.closeSync(fd);
|
||||
const content = buf.toString('utf-8');
|
||||
const lines = content.trim().split('\n').slice(-10);
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.includes('error') || line.includes('Error') || line.includes('WARN') || line.includes('exited')) {
|
||||
errors.push({ ts: Date.now(), msg: line.trim(), mid, isError: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
|
||||
return {
|
||||
notifications: notifications.slice(-maxLines),
|
||||
errors: errors.slice(-3),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract accumulated cost from NDJSON stdout log (fallback when status.json is missing).
|
||||
* Sums `message.usage.cost.total` from all `message_end` events.
|
||||
*/
|
||||
function extractCostFromNdjson(mid) {
|
||||
const stdoutPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`);
|
||||
if (!fs.existsSync(stdoutPath)) return 0;
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(stdoutPath, 'utf-8');
|
||||
let total = 0;
|
||||
for (const line of content.split('\n')) {
|
||||
if (!line.includes('message_end')) continue;
|
||||
try {
|
||||
const obj = JSON.parse(line);
|
||||
if (obj.type === 'message_end') {
|
||||
const cost = obj.message?.usage?.cost?.total;
|
||||
if (typeof cost === 'number') total += cost;
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
return total;
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Self-Healing ────────────────────────────────────────────────────────────
|
||||
|
||||
// Auto-detect the GSD loader path — works across npm global, homebrew, and local installs
|
||||
function findGsdLoader() {
|
||||
// 1. Check if we're running from inside the gsd-2 repo itself
|
||||
const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js');
|
||||
if (fs.existsSync(repoLoader)) return repoLoader;
|
||||
|
||||
// 2. Check common global install locations
|
||||
try {
|
||||
const globalRoot = execSync('npm root -g', { encoding: 'utf-8', timeout: 3000 }).trim();
|
||||
const candidates = [
|
||||
path.join(globalRoot, 'gsd-pi', 'dist', 'loader.js'),
|
||||
path.join(globalRoot, '@gsd', 'pi', 'dist', 'loader.js'),
|
||||
];
|
||||
for (const c of candidates) {
|
||||
if (fs.existsSync(c)) return c;
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
|
||||
// 3. Try `which gsd` and resolve symlink
|
||||
try {
|
||||
const bin = execSync('which gsd', { encoding: 'utf-8', timeout: 3000 }).trim();
|
||||
if (bin) {
|
||||
const realBin = fs.realpathSync(bin);
|
||||
const loader = path.resolve(path.dirname(realBin), '..', 'dist', 'loader.js');
|
||||
if (fs.existsSync(loader)) return loader;
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
const GSD_LOADER = findGsdLoader();
|
||||
|
||||
/**
|
||||
* Respawn a dead worker. Returns the new PID or null on failure.
|
||||
* Uses nohup + output redirection so the child is fully detached.
|
||||
*/
|
||||
function respawnWorker(mid) {
|
||||
const worktreeDir = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}`);
|
||||
if (!fs.existsSync(worktreeDir)) return null;
|
||||
if (!fs.existsSync(GSD_LOADER)) return null;
|
||||
|
||||
const stdoutLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`);
|
||||
const stderrLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`);
|
||||
|
||||
try {
|
||||
const env = [
|
||||
`GSD_MILESTONE_LOCK=${mid}`,
|
||||
`GSD_PROJECT_ROOT=${PROJECT_ROOT}`,
|
||||
`GSD_PARALLEL_WORKER=1`,
|
||||
].join(' ');
|
||||
|
||||
// Use a shell script written to a temp file to avoid quoting hell
|
||||
const script = [
|
||||
'#!/bin/bash',
|
||||
`cd "${worktreeDir}"`,
|
||||
`export GSD_MILESTONE_LOCK=${mid}`,
|
||||
`export GSD_PROJECT_ROOT="${PROJECT_ROOT}"`,
|
||||
`export GSD_PARALLEL_WORKER=1`,
|
||||
`exec node "${GSD_LOADER}" headless --json auto > "${stdoutLog}" 2>> "${stderrLog}"`,
|
||||
].join('\n');
|
||||
|
||||
const scriptPath = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.respawn.sh`);
|
||||
fs.writeFileSync(scriptPath, script, { mode: 0o755 });
|
||||
|
||||
// Launch detached via nohup
|
||||
const result = execSync(
|
||||
`nohup bash "${scriptPath}" > /dev/null 2>&1 & echo $!`,
|
||||
{ timeout: 5000, encoding: 'utf-8', cwd: worktreeDir }
|
||||
).trim();
|
||||
|
||||
// Clean up the temp script after a delay (process already forked)
|
||||
setTimeout(() => {
|
||||
try { fs.unlinkSync(scriptPath); } catch {}
|
||||
}, 5000);
|
||||
|
||||
const newPid = parseInt(result, 10);
|
||||
return isNaN(newPid) ? null : newPid;
|
||||
} catch (err) {
|
||||
return null;
|
||||
}
|
||||
}
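// Note on the respawn pattern: `nohup bash script & echo $!` returns the PID of
// the backgrounded shell, and `exec node ...` inside the script replaces that
// shell in place, so the captured PID remains the respawned worker's PID.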
|
||||
|
||||
/**
|
||||
* Check all workers and respawn dead ones if --heal is active.
|
||||
* Returns an array of heal events for the event feed.
|
||||
*/
|
||||
function healWorkers(workers) {
|
||||
if (!HEAL_MODE) return [];
|
||||
|
||||
const events = [];
|
||||
const now = Date.now();
|
||||
|
||||
for (const wk of workers) {
|
||||
if (wk.alive) {
|
||||
// Worker is alive — reset its heal state on success
|
||||
if (healState[wk.mid]) {
|
||||
healState[wk.mid].retries = 0;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Worker is dead — check if we should attempt a respawn
|
||||
if (!healState[wk.mid]) {
|
||||
healState[wk.mid] = { lastAttempt: 0, retries: 0 };
|
||||
}
|
||||
|
||||
const hs = healState[wk.mid];
|
||||
|
||||
// Give up after max retries
|
||||
if (hs.retries >= HEAL_MAX_RETRIES) {
|
||||
if (hs.retries === HEAL_MAX_RETRIES) {
|
||||
events.push({
|
||||
ts: now, mid: wk.mid,
|
||||
msg: `⛔ ${wk.mid}: gave up after ${HEAL_MAX_RETRIES} respawn attempts`
|
||||
});
|
||||
hs.retries++; // Increment past max so this message only shows once
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Cooldown — don't respawn too quickly
|
||||
const elapsed = now - hs.lastAttempt;
|
||||
if (elapsed < HEAL_COOLDOWN_SEC * 1000) {
|
||||
// Still cooling down — skip silently rather than spamming the event feed
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check the milestone isn't already complete
|
||||
const allSlicesDone = wk.slices.length > 0 && wk.slices.every(s => s.status === 'complete');
|
||||
if (allSlicesDone) {
|
||||
events.push({ ts: now, mid: wk.mid, msg: `✅ ${wk.mid}: all slices complete, no respawn needed` });
|
||||
hs.retries = HEAL_MAX_RETRIES + 1; // Don't try again
|
||||
continue;
|
||||
}
|
||||
|
||||
// Attempt respawn
|
||||
hs.lastAttempt = now;
|
||||
hs.retries++;
|
||||
|
||||
events.push({
|
||||
ts: now, mid: wk.mid,
|
||||
msg: `🔄 ${wk.mid}: respawning (attempt ${hs.retries}/${HEAL_MAX_RETRIES})...`
|
||||
});
|
||||
|
||||
const newPid = respawnWorker(wk.mid);
|
||||
|
||||
if (newPid) {
|
||||
events.push({
|
||||
ts: now, mid: wk.mid,
|
||||
msg: `🟢 ${wk.mid}: respawned as PID ${newPid}`
|
||||
});
|
||||
// Reset stderr baseline so we don't show old errors
|
||||
delete stderrBaselines[wk.mid];
|
||||
} else {
|
||||
events.push({
|
||||
ts: now, mid: wk.mid, isError: true,
|
||||
msg: `❌ ${wk.mid}: respawn failed`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return events;
|
||||
}
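// Heal state machine per worker: alive -> retry counter resets; dead -> respawn
// after the cooldown, up to HEAL_MAX_RETRIES attempts, then a single "gave up"
// event; already-complete milestones are never respawned.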
|
||||
|
||||
// ─── Formatting Helpers ──────────────────────────────────────────────────────
|
||||
|
||||
function formatDuration(ms) {
|
||||
if (!ms || ms < 0) return '--:--';
|
||||
const totalSec = Math.floor(ms / 1000);
|
||||
const h = Math.floor(totalSec / 3600);
|
||||
const m = Math.floor((totalSec % 3600) / 60);
|
||||
const s = totalSec % 60;
|
||||
if (h > 0) return `${h}h${String(m).padStart(2, '0')}m`;
|
||||
return `${String(m).padStart(2, '0')}m${String(s).padStart(2, '0')}s`;
|
||||
}
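// e.g. formatDuration(65_000) -> "01m05s"; formatDuration(3_720_000) -> "1h02m"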
|
||||
|
||||
function formatCost(cost) {
|
||||
if (cost == null) return '$-.--';
|
||||
return `$${cost.toFixed(2)}`;
|
||||
}
|
||||
|
||||
function healthColor(heartbeatAge, alive) {
if (!alive) return 'red';
// PID alive is the strongest signal — the worker is running.
if (heartbeatAge < 30000) return 'green';
// Alive but with a stale heartbeat — either respawned (no orchestrator writing
// status.json) or potentially stuck. Still green: the headless idle timeout (120s)
// kills genuinely stuck workers, so a live PID is treated as healthy.
return 'green';
}
|
||||
|
||||
function healthIcon(color) {
|
||||
switch (color) {
|
||||
case 'green': return '●';
|
||||
case 'yellow': return '◐';
|
||||
case 'red': return '○';
|
||||
default: return '?';
|
||||
}
|
||||
}
|
||||
|
||||
function unitTypeLabel(unitType) {
|
||||
const labels = {
|
||||
'execute-task': 'EXEC',
|
||||
'research-slice': 'RSRCH',
|
||||
'plan-slice': 'PLAN',
|
||||
'complete-slice': 'DONE',
|
||||
'complete-task': 'DONE',
|
||||
'reassess': 'ASSESS',
|
||||
'validate': 'VALID',
|
||||
};
|
||||
return labels[unitType] || (unitType || '---').toUpperCase().slice(0, 5);
|
||||
}
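// e.g. unitTypeLabel('execute-task') -> 'EXEC'; unmapped types fall back to the
// first five characters uppercased.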
|
||||
|
||||
function progressBar(done, total, width = 20) {
|
||||
if (total === 0) return `${'░'.repeat(width)}`;
|
||||
const filled = Math.round((done / total) * width);
|
||||
const empty = width - filled;
|
||||
return `${'█'.repeat(filled)}${'░'.repeat(empty)}`;
|
||||
}
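// e.g. progressBar(3, 10, 10) -> "███░░░░░░░"; a zero total renders an all-empty bar.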
|
||||
|
||||
function pad(str, width) {
|
||||
const s = String(str);
|
||||
return s.length >= width ? s.slice(0, width) : s + ' '.repeat(width - s.length);
|
||||
}
|
||||
|
||||
function rpad(str, width) {
|
||||
const s = String(str);
|
||||
return s.length >= width ? s.slice(0, width) : ' '.repeat(width - s.length) + s;
|
||||
}
|
||||
|
||||
function truncate(str, maxLen) {
|
||||
if (str.length <= maxLen) return str;
|
||||
return str.slice(0, maxLen - 1) + '…';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recently completed tasks/slices from the worktree DB for the event feed.
|
||||
*/
|
||||
function queryRecentCompletions(mid) {
|
||||
const dbPath = path.resolve(PROJECT_ROOT, `.gsd/worktrees/${mid}/.gsd/gsd.db`);
|
||||
if (!fs.existsSync(dbPath)) return [];
|
||||
|
||||
try {
|
||||
// Completed tasks with timestamps, most recent first
|
||||
const sql = `SELECT id, slice_id, one_liner, completed_at FROM tasks WHERE milestone_id='${mid}' AND status='complete' AND completed_at IS NOT NULL ORDER BY completed_at DESC LIMIT 5`;
|
||||
const out = execSync(`sqlite3 "${dbPath}" "${sql}"`, { timeout: 3000, encoding: 'utf-8' }).trim();
|
||||
if (!out) return [];
|
||||
return out.split('\n').map(line => {
|
||||
const [taskId, sliceId, oneLiner, completedAt] = line.split('|');
|
||||
return {
|
||||
ts: completedAt ? new Date(completedAt).getTime() : Date.now(),
|
||||
msg: `✓ ${mid}/${sliceId}/${taskId}${oneLiner ? ': ' + oneLiner : ''}`,
|
||||
mid,
|
||||
};
|
||||
});
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
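// Assumes the sqlite3 CLI is on PATH and relies on its default pipe-separated
// list output for the split('|') above. Milestone IDs are interpolated directly
// into the SQL, so they are expected to be simple tokens (no quotes).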
|
||||
|
||||
// ─── Rendering ───────────────────────────────────────────────────────────────
|
||||
|
||||
let COLS = Math.max(process.stdout.columns || 100, 80);
let ROWS = Math.max(process.stdout.rows || 40, 20);
|
||||
|
||||
let lastEventFeed = []; // Persisted across renders
|
||||
const stderrBaselines = {}; // mid → file size at monitor startup (skip pre-existing errors)
|
||||
|
||||
function collectWorkerData() {
|
||||
const mids = discoverWorkers();
|
||||
const workers = [];
|
||||
|
||||
for (const mid of mids) {
|
||||
const status = readWorkerStatus(mid);
|
||||
const lock = readAutoLock(mid);
|
||||
const slices = querySliceProgress(mid);
|
||||
const { notifications, errors } = readRecentEvents(mid, 3);
|
||||
|
||||
// Prefer auto.lock PID (written by the running worker) over status.json PID
|
||||
// (written by the orchestrator, stale after respawn)
|
||||
const pid = lock?.pid || status?.pid;
|
||||
const alive = pid ? isPidAlive(pid) : false;
|
||||
// Heartbeat: prefer status.json if its PID matches (orchestrator-managed),
|
||||
// otherwise fall back to stdout.log mtime (respawned workers write NDJSON continuously)
|
||||
let heartbeatAge = Infinity;
|
||||
const statusPidMatches = status?.pid && status.pid === pid;
|
||||
if (status?.lastHeartbeat && statusPidMatches) {
|
||||
heartbeatAge = Date.now() - status.lastHeartbeat;
|
||||
} else {
|
||||
// Check stdout/stderr log mtime as proxy heartbeat
|
||||
const stdoutLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stdout.log`);
|
||||
const stderrLog = path.resolve(PROJECT_ROOT, PARALLEL_DIR, `${mid}.stderr.log`);
|
||||
try {
|
||||
const mtimes = [];
|
||||
if (fs.existsSync(stdoutLog)) mtimes.push(fs.statSync(stdoutLog).mtimeMs);
|
||||
if (fs.existsSync(stderrLog)) mtimes.push(fs.statSync(stderrLog).mtimeMs);
|
||||
if (lock?.unitStartedAt) mtimes.push(new Date(lock.unitStartedAt).getTime());
|
||||
if (mtimes.length > 0) heartbeatAge = Date.now() - Math.max(...mtimes);
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
|
||||
// Cost: prefer status.json, fall back to NDJSON log parsing
|
||||
let cost = status?.cost || 0;
|
||||
if (cost === 0) {
|
||||
cost = extractCostFromNdjson(mid);
|
||||
}
|
||||
|
||||
const totalTasks = slices.reduce((sum, s) => sum + s.total, 0);
|
||||
const doneTasks = slices.reduce((sum, s) => sum + s.done, 0);
|
||||
const doneSlices = slices.filter(s => s.status === 'complete').length;
|
||||
const totalSlices = slices.length;
|
||||
|
||||
// Current unit from auto.lock (more accurate than status.json currentUnit)
|
||||
const currentUnit = lock?.unitId || status?.currentUnit || null;
|
||||
const unitType = lock?.unitType || null;
|
||||
const unitStarted = lock?.unitStartedAt ? new Date(lock.unitStartedAt).getTime() : null;
|
||||
|
||||
// If there's no lock and the worker is dead, the render path shows "stopped"
// rather than a misleading unit label.
|
||||
|
||||
const elapsed = status?.startedAt
|
||||
? Date.now() - status.startedAt
|
||||
: (lock?.startedAt ? Date.now() - new Date(lock.startedAt).getTime() : 0);
|
||||
|
||||
workers.push({
|
||||
mid,
|
||||
pid,
|
||||
alive,
|
||||
state: alive ? 'running' : (status?.state || 'dead'),
|
||||
cost,
|
||||
heartbeatAge,
|
||||
health: healthColor(heartbeatAge, alive),
|
||||
currentUnit,
|
||||
unitType,
|
||||
unitElapsed: unitStarted ? Date.now() - unitStarted : 0,
|
||||
elapsed,
|
||||
totalTasks,
|
||||
doneTasks,
|
||||
totalSlices,
|
||||
doneSlices,
|
||||
slices,
|
||||
notifications,
|
||||
errors,
|
||||
});
|
||||
}
|
||||
|
||||
return workers;
|
||||
}
|
||||
|
||||
function render(workers) {
|
||||
const buf = [];
|
||||
const w = COLS;
|
||||
|
||||
// ── Header ──
|
||||
buf.push('');
|
||||
const title = ' GSD Parallel Monitor ';
|
||||
const titlePad = Math.max(0, Math.floor((w - title.length) / 2));
|
||||
buf.push(
|
||||
`${' '.repeat(titlePad)}${BOLD}${BG.blue}${FG.white}${title}${RESET}`
|
||||
);
|
||||
|
||||
const now = new Date().toLocaleTimeString();
|
||||
const totalCost = workers.reduce((s, w) => s + w.cost, 0);
|
||||
const aliveCount = workers.filter(w => w.alive).length;
|
||||
|
||||
const healTag = HEAL_MODE ? ` │ ${FG.green}⚕ heal${RESET}${DIM}` : '';
|
||||
buf.push(
|
||||
`${DIM} ${now} │ ${aliveCount}/${workers.length} alive │ Total: ${RESET}${BOLD}${formatCost(totalCost)}${RESET}${DIM} │ Refresh: ${INTERVAL_SEC}s${healTag}${RESET}`
|
||||
);
|
||||
buf.push(`${DIM}${'─'.repeat(w)}${RESET}`);
|
||||
|
||||
// ── Worker Panels ──
|
||||
if (workers.length === 0) {
|
||||
buf.push('');
|
||||
buf.push(` ${FG.yellow}No workers found in ${PARALLEL_DIR}/${RESET}`);
|
||||
buf.push(` ${DIM}Waiting for .gsd/parallel/*.status.json files...${RESET}`);
|
||||
} else {
|
||||
for (const wk of workers) {
|
||||
buf.push('');
|
||||
|
||||
// Worker header: milestone ID + health + state
|
||||
const icon = healthIcon(wk.health);
|
||||
const hc = FG[wk.health];
|
||||
const stateLabel = wk.alive
|
||||
? (wk.state === 'running' ? `${FG.green}RUNNING${RESET}` : `${FG.yellow}${wk.state.toUpperCase()}${RESET}`)
|
||||
: `${FG.red}${BOLD}DEAD${RESET}`;
|
||||
|
||||
const heartbeatText = wk.heartbeatAge === Infinity
|
||||
? 'never'
|
||||
: formatDuration(wk.heartbeatAge) + ' ago';
|
||||
|
||||
buf.push(
|
||||
` ${hc}${icon}${RESET} ${BOLD}${wk.mid}${RESET} ${stateLabel} ${DIM}PID ${wk.pid || '?'}${RESET} ${DIM}│${RESET} ${DIM}elapsed${RESET} ${formatDuration(wk.elapsed)} ${DIM}│${RESET} ${DIM}cost${RESET} ${BOLD}${formatCost(wk.cost)}${RESET} ${DIM}│${RESET} ${DIM}heartbeat${RESET} ${hc}${heartbeatText}${RESET}`
|
||||
);
|
||||
|
||||
// Current unit
|
||||
if (wk.currentUnit) {
|
||||
const phaseColor = wk.unitType === 'execute-task' ? FG.cyan
|
||||
: wk.unitType === 'research-slice' ? FG.magenta
|
||||
: wk.unitType === 'plan-slice' ? FG.blue
|
||||
: wk.unitType?.includes('complete') ? FG.green
|
||||
: FG.white;
|
||||
|
||||
buf.push(
|
||||
` ${DIM}▸${RESET} ${phaseColor}${unitTypeLabel(wk.unitType)}${RESET} ${wk.currentUnit} ${DIM}(${formatDuration(wk.unitElapsed)})${RESET}`
|
||||
);
|
||||
} else if (!wk.alive) {
|
||||
buf.push(` ${DIM}▸ ${FG.red}stopped${RESET}`);
|
||||
} else {
|
||||
buf.push(` ${DIM}▸ idle / between units${RESET}`);
|
||||
}
|
||||
|
||||
// Slice progress grid
|
||||
if (wk.slices.length > 0) {
|
||||
const sliceChips = wk.slices.map(s => {
|
||||
const pct = s.total > 0 ? s.done / s.total : 0;
|
||||
let color;
|
||||
if (s.status === 'complete') color = FG.green;
|
||||
else if (pct > 0) color = FG.yellow;
|
||||
else color = FG.gray;
|
||||
|
||||
const label = `${s.id}:${s.done}/${s.total}`;
|
||||
return `${color}${label}${RESET}`;
|
||||
});
|
||||
|
||||
buf.push(` ${DIM}slices${RESET} ${sliceChips.join(' ')}`);
|
||||
|
||||
// Overall progress bar
|
||||
const bar = progressBar(wk.doneTasks, wk.totalTasks, 30);
|
||||
const pctStr = wk.totalTasks > 0
|
||||
? `${Math.round((wk.doneTasks / wk.totalTasks) * 100)}%`
|
||||
: '0%';
|
||||
buf.push(
|
||||
` ${DIM}tasks${RESET} ${FG.green}${bar}${RESET} ${wk.doneTasks}/${wk.totalTasks} ${DIM}(${pctStr})${RESET} ${DIM}│${RESET} ${DIM}slices done${RESET} ${wk.doneSlices}/${wk.totalSlices}`
|
||||
);
|
||||
}
|
||||
|
||||
// Recent errors from this worker
|
||||
if (wk.errors.length > 0) {
|
||||
for (const err of wk.errors.slice(-2)) {
|
||||
buf.push(` ${FG.red}⚠ ${truncate(err.msg, w - 10)}${RESET}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Separator ──
|
||||
buf.push('');
|
||||
buf.push(`${DIM}${'─'.repeat(w)}${RESET}`);
|
||||
|
||||
// ── Event Feed ──
|
||||
buf.push(` ${BOLD}Recent Events${RESET}`);
|
||||
|
||||
// Collect new notification events from all workers
|
||||
for (const wk of workers) {
|
||||
for (const evt of wk.notifications) {
|
||||
if (!lastEventFeed.some(e => e.msg === evt.msg && e.mid === evt.mid)) {
|
||||
lastEventFeed.push(evt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also add recent task completions from the DB
|
||||
for (const wk of workers) {
|
||||
const completions = queryRecentCompletions(wk.mid);
|
||||
for (const evt of completions) {
|
||||
if (!lastEventFeed.some(e => e.msg === evt.msg)) {
|
||||
lastEventFeed.push(evt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp and keep last 10
|
||||
lastEventFeed.sort((a, b) => a.ts - b.ts);
|
||||
lastEventFeed = lastEventFeed.slice(-10);
|
||||
|
||||
if (lastEventFeed.length === 0) {
|
||||
buf.push(` ${DIM}No events yet...${RESET}`);
|
||||
} else {
|
||||
for (const evt of lastEventFeed.slice(-6)) {
|
||||
const midTag = `${FG.cyan}${evt.mid}${RESET}`;
|
||||
buf.push(` ${DIM}│${RESET} ${midTag} ${truncate(evt.msg, w - 12)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Completion Check ──
|
||||
const allDone = workers.length > 0 && workers.every(w => !w.alive);
|
||||
if (allDone) {
|
||||
buf.push('');
|
||||
buf.push(`${DIM}${'─'.repeat(w)}${RESET}`);
|
||||
buf.push('');
|
||||
const doneMsg = ' ALL WORKERS COMPLETE ';
|
||||
const donePad = Math.max(0, Math.floor((w - doneMsg.length) / 2));
|
||||
buf.push(
|
||||
`${' '.repeat(donePad)}${BOLD}${BG.green}${FG.black}${doneMsg}${RESET}`
|
||||
);
|
||||
buf.push('');
|
||||
for (const wk of workers) {
|
||||
buf.push(` ${wk.mid} ${formatCost(wk.cost)} ${DIM}│${RESET} ${wk.doneSlices}/${wk.totalSlices} slices ${wk.doneTasks}/${wk.totalTasks} tasks ${DIM}│${RESET} ${formatDuration(wk.elapsed)}`);
|
||||
}
|
||||
const totalCostFinal = workers.reduce((s, w) => s + w.cost, 0);
|
||||
buf.push(` ${BOLD}Total: ${formatCost(totalCostFinal)}${RESET}`);
|
||||
}
|
||||
|
||||
// ── Footer ──
|
||||
buf.push('');
|
||||
const healInfo = HEAL_MODE
|
||||
? ` │ heal: ${HEAL_COOLDOWN_SEC}s cooldown, ${HEAL_MAX_RETRIES} max retries`
|
||||
: '';
|
||||
buf.push(` ${DIM}Ctrl+C to exit${allDone ? ' (monitoring stopped)' : ''}${healInfo}${RESET}`);
|
||||
|
||||
// Write to screen
|
||||
process.stdout.write(CLEAR_SCREEN);
|
||||
process.stdout.write(buf.join('\n') + '\n');
|
||||
|
||||
return allDone;
|
||||
}
|
||||
|
||||
// ─── Main Loop ───────────────────────────────────────────────────────────────
|
||||
|
||||
function main() {
|
||||
process.stdout.write(HIDE_CURSOR);
|
||||
|
||||
// Handle resize — refresh the cached dimensions so the next render picks them up
process.stdout.on('resize', () => {
COLS = Math.max(process.stdout.columns || 100, 80);
ROWS = Math.max(process.stdout.rows || 40, 20);
});
|
||||
|
||||
// Graceful exit
|
||||
const cleanup = () => {
|
||||
process.stdout.write(SHOW_CURSOR);
|
||||
process.stdout.write(CLEAR_SCREEN);
|
||||
console.log('Monitor stopped.');
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
process.on('SIGINT', cleanup);
|
||||
process.on('SIGTERM', cleanup);
|
||||
|
||||
// Initial render
|
||||
const workers = collectWorkerData();
|
||||
const healEvents = healWorkers(workers);
|
||||
for (const evt of healEvents) lastEventFeed.push(evt);
|
||||
let done = render(workers);
|
||||
|
||||
if (done || ONE_SHOT) {
|
||||
process.stdout.write(SHOW_CURSOR);
|
||||
return;
|
||||
}
|
||||
|
||||
// Refresh loop
|
||||
const timer = setInterval(() => {
|
||||
try {
|
||||
const workers = collectWorkerData();
|
||||
const healEvents = healWorkers(workers);
|
||||
for (const evt of healEvents) lastEventFeed.push(evt);
|
||||
done = render(workers);
|
||||
|
||||
if (done) {
|
||||
clearInterval(timer);
|
||||
// Keep showing final state for 3 seconds then exit
|
||||
setTimeout(() => {
|
||||
process.stdout.write(SHOW_CURSOR);
|
||||
process.exit(0);
|
||||
}, 3000);
|
||||
}
|
||||
} catch (err) {
|
||||
// Don't crash the monitor on transient read errors
|
||||
process.stderr.write(`Monitor error: ${err.message}\n`);
|
||||
}
|
||||
}, INTERVAL_SEC * 1000);
|
||||
}
|
||||
|
||||
main();
|
||||
44 scripts/test-reporter-compact.mjs Normal file
@@ -0,0 +1,44 @@
|
|||
/**
|
||||
* Compact test reporter: silent on pass, prints failures + final summary.
|
||||
* Usage: --test-reporter=./scripts/test-reporter-compact.mjs
|
||||
*/
|
||||
import { Transform } from 'node:stream';
|
||||
|
||||
export default class CompactReporter extends Transform {
|
||||
#pass = 0;
|
||||
#fail = 0;
|
||||
#skip = 0;
|
||||
#failures = [];
|
||||
|
||||
constructor() {
|
||||
super({ objectMode: true });
|
||||
}
|
||||
|
||||
_transform(event, _enc, cb) {
|
||||
switch (event.type) {
|
||||
case 'test:pass':
|
||||
if (!event.data.skip) this.#pass++;
|
||||
else this.#skip++;
|
||||
break;
|
||||
case 'test:fail': {
|
||||
this.#fail++;
|
||||
const { name, details } = event.data;
|
||||
const err = details?.error;
|
||||
const msg = err?.message ?? String(err ?? 'unknown');
|
||||
const loc = err?.cause?.stack?.split('\n')[1]?.trim() ?? '';
|
||||
this.#failures.push(` ✖ ${name}\n ${msg}${loc ? `\n ${loc}` : ''}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
cb();
|
||||
}
|
||||
|
||||
_flush(cb) {
|
||||
if (this.#failures.length) {
|
||||
this.push(`\n✖ failing tests:\n${this.#failures.join('\n\n')}\n`);
|
||||
}
|
||||
const status = this.#fail === 0 ? '✔' : '✖';
|
||||
this.push(`\n${status} ${this.#pass} passed, ${this.#fail} failed, ${this.#skip} skipped\n`);
|
||||
cb();
|
||||
}
|
||||
}
|
||||
45 src/cli.ts
@@ -133,21 +133,6 @@ const isPrintMode = cliFlags.print || cliFlags.mode !== undefined
|
|||
|
||||
// Early resource-skew check — must run before TTY gate so version mismatch
|
||||
// errors surface even in non-TTY environments.
|
||||
exitIfManagedResourcesAreNewer(agentDir)
|
||||
|
||||
// Early TTY check — must come before heavy initialization to avoid dangling
|
||||
// handles that prevent process.exit() from completing promptly.
|
||||
const hasSubcommand = cliFlags.messages.length > 0
|
||||
if (!process.stdin.isTTY && !isPrintMode && !hasSubcommand && !cliFlags.listModels && !cliFlags.web) {
|
||||
process.stderr.write('[gsd] Error: Interactive mode requires a terminal (TTY).\n')
|
||||
process.stderr.write('[gsd] Non-interactive alternatives:\n')
|
||||
process.stderr.write('[gsd] gsd --print "your message" Single-shot prompt\n')
|
||||
process.stderr.write('[gsd] gsd --mode rpc JSON-RPC over stdin/stdout\n')
|
||||
process.stderr.write('[gsd] gsd --mode mcp MCP server over stdin/stdout\n')
|
||||
process.stderr.write('[gsd] gsd --mode text "message" Text output mode\n')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
async function ensureRtkBootstrap(): Promise<void> {
|
||||
if ((ensureRtkBootstrap as { _done?: boolean })._done) return
|
||||
|
||||
|
|
@ -170,6 +155,28 @@ async function ensureRtkBootstrap(): Promise<void> {
|
|||
}
|
||||
}
|
||||
|
||||
// `gsd update` — update to the latest version via npm
|
||||
if (cliFlags.messages[0] === 'update') {
|
||||
const { runUpdate } = await import('./update-cmd.js')
|
||||
await runUpdate()
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
exitIfManagedResourcesAreNewer(agentDir)
|
||||
|
||||
// Early TTY check — must come before heavy initialization to avoid dangling
|
||||
// handles that prevent process.exit() from completing promptly.
|
||||
const hasSubcommand = cliFlags.messages.length > 0
|
||||
if (!process.stdin.isTTY && !isPrintMode && !hasSubcommand && !cliFlags.listModels && !cliFlags.web) {
|
||||
process.stderr.write('[gsd] Error: Interactive mode requires a terminal (TTY).\n')
|
||||
process.stderr.write('[gsd] Non-interactive alternatives:\n')
|
||||
process.stderr.write('[gsd] gsd --print "your message" Single-shot prompt\n')
|
||||
process.stderr.write('[gsd] gsd --mode rpc JSON-RPC over stdin/stdout\n')
|
||||
process.stderr.write('[gsd] gsd --mode mcp MCP server over stdin/stdout\n')
|
||||
process.stderr.write('[gsd] gsd --mode text "message" Text output mode\n')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// `gsd <subcommand> --help` — show subcommand-specific help
|
||||
const subcommand = cliFlags.messages[0]
|
||||
if (subcommand && process.argv.includes('--help')) {
|
||||
|
|
@ -199,13 +206,6 @@ if (cliFlags.messages[0] === 'config') {
|
|||
process.exit(0)
|
||||
}
|
||||
|
||||
// `gsd update` — update to the latest version via npm
|
||||
if (cliFlags.messages[0] === 'update') {
|
||||
const { runUpdate } = await import('./update-cmd.js')
|
||||
await runUpdate()
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
// `gsd web stop [path|all]` — stop web server before anything else
|
||||
if (cliFlags.messages[0] === 'web' && cliFlags.messages[1] === 'stop') {
|
||||
const webFlags = parseWebCliArgs(process.argv)
|
||||
|
|
@ -688,4 +688,3 @@ const interactiveMode = new InteractiveMode(session)
|
|||
markStartup('InteractiveMode')
|
||||
printStartupTimings()
|
||||
await interactiveMode.run()
|
||||
|
||||
|
|
|
|||
|
|
@ -28,6 +28,116 @@ interface ExtensionUIRequest {
|
|||
|
||||
export type { ExtensionUIRequest }
|
||||
|
||||
/** Context passed alongside an event for richer formatting. */
|
||||
export interface ProgressContext {
|
||||
verbose: boolean
|
||||
toolDuration?: number // ms, for tool_execution_end
|
||||
lastCost?: { costUsd: number; inputTokens: number; outputTokens: number }
|
||||
thinkingPreview?: string // accumulated LLM text to show before tool calls
|
||||
isError?: boolean // tool execution ended with an error
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// ANSI Color Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const _c = {
|
||||
reset: '\x1b[0m',
|
||||
bold: '\x1b[1m',
|
||||
dim: '\x1b[2m',
|
||||
italic: '\x1b[3m',
|
||||
red: '\x1b[31m',
|
||||
green: '\x1b[32m',
|
||||
yellow: '\x1b[33m',
|
||||
cyan: '\x1b[36m',
|
||||
gray: '\x1b[90m',
|
||||
}
|
||||
|
||||
/** Build a no-op color map (all codes empty). */
|
||||
function noColor(): typeof _c {
|
||||
const nc: Record<string, string> = {}
|
||||
for (const k of Object.keys(_c)) nc[k] = ''
|
||||
return nc as typeof _c
|
||||
}
|
||||
|
||||
const colorsDisabled = !!process.env['NO_COLOR'] || !process.stderr.isTTY
|
||||
const c: typeof _c = colorsDisabled ? noColor() : _c
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tool-Arg Summarizer
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Produce a short human-readable summary of tool arguments.
|
||||
* Returns a string like "path/to/file.ts" or "grep pattern *.ts" — never the
|
||||
* full JSON blob.
|
||||
*/
|
||||
export function summarizeToolArgs(toolName: unknown, toolInput: unknown): string {
|
||||
const name = String(toolName ?? '')
|
||||
const input = (toolInput && typeof toolInput === 'object') ? toolInput as Record<string, unknown> : {}
|
||||
|
||||
switch (name) {
|
||||
case 'Read':
|
||||
case 'read':
|
||||
return shortPath(input.file_path) || ''
|
||||
case 'Write':
|
||||
case 'write':
|
||||
return shortPath(input.file_path) || ''
|
||||
case 'Edit':
|
||||
case 'edit':
|
||||
return shortPath(input.file_path) || ''
|
||||
case 'Bash':
|
||||
case 'bash': {
|
||||
const cmd = String(input.command ?? '')
|
||||
return cmd.length > 80 ? cmd.slice(0, 77) + '...' : cmd
|
||||
}
|
||||
case 'Glob':
|
||||
case 'glob':
|
||||
return String(input.pattern ?? '')
|
||||
case 'Grep':
|
||||
case 'grep':
|
||||
case 'Search':
|
||||
case 'search': {
|
||||
const pat = String(input.pattern ?? '')
|
||||
const g = input.glob ? ` ${input.glob}` : ''
|
||||
return `${pat}${g}`
|
||||
}
|
||||
case 'Task':
|
||||
case 'task': {
|
||||
const desc = String(input.description ?? input.prompt ?? '')
|
||||
return desc.length > 60 ? desc.slice(0, 57) + '...' : desc
|
||||
}
|
||||
default: {
|
||||
// Fallback: show first string-valued key up to 60 chars
|
||||
for (const v of Object.values(input)) {
|
||||
if (typeof v === 'string' && v.length > 0) {
|
||||
return v.length > 60 ? v.slice(0, 57) + '...' : v
|
||||
}
|
||||
}
|
||||
return ''
|
||||
}
|
||||
}
|
||||
}
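// e.g. summarizeToolArgs('Bash', { command: 'npm test' }) -> 'npm test';
// summarizeToolArgs('Read', { file_path: '/tmp/a.ts' }) -> '/tmp/a.ts'
// (paths are shortened relative to cwd when possible).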
|
||||
|
||||
function shortPath(p: unknown): string {
|
||||
if (typeof p !== 'string') return ''
|
||||
// Strip common CWD prefix to save space
|
||||
const cwd = process.cwd()
|
||||
if (p.startsWith(cwd + '/')) return p.slice(cwd.length + 1)
|
||||
// Strip /Users/*/Developer/ prefix
|
||||
return p.replace(/^\/Users\/[^/]+\/Developer\//, '')
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Format Duration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
const s = (ms / 1000).toFixed(1)
|
||||
return `${s}s`
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Extension UI Auto-Responder
|
||||
// ---------------------------------------------------------------------------
|
||||
|
|
@ -78,55 +188,78 @@ export function handleExtensionUIRequest(
|
|||
// Progress Formatter
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export function formatProgress(event: Record<string, unknown>, verbose: boolean): string | null {
|
||||
export function formatProgress(event: Record<string, unknown>, ctx: ProgressContext): string | null {
|
||||
const type = String(event.type ?? '')
|
||||
|
||||
// Note: ctx.thinkingPreview is not consumed here — the caller in headless.ts
// flushes accumulated thinking text via formatThinkingLine() before emitting
// this event's line.
|
||||
|
||||
switch (type) {
|
||||
case 'tool_execution_start': {
|
||||
if (!ctx.verbose) return null
|
||||
const name = String(event.toolName ?? 'unknown')
|
||||
const summary = summarizeToolArgs(name, event.args as Record<string, unknown> | undefined)
|
||||
return summary ? ` [tool] ${name} ${summary}` : ` [tool] ${name}`
|
||||
const args = summarizeToolArgs(event.toolName, event.args)
|
||||
const argStr = args ? ` ${c.dim}${args}${c.reset}` : ''
|
||||
return ` ${c.dim}[tool]${c.reset} ${name}${argStr}`
|
||||
}
|
||||
|
||||
case 'tool_execution_end': {
|
||||
if (verbose) {
|
||||
const name = String(event.toolName ?? 'unknown')
|
||||
const isError = Boolean(event.isError)
|
||||
return isError ? ` [tool] ${name} ✗ error` : null
|
||||
if (!ctx.verbose) return null
|
||||
const name = String(event.toolName ?? 'unknown')
|
||||
const durationStr = ctx.toolDuration != null ? ` ${c.dim}${formatDuration(ctx.toolDuration)}${c.reset}` : ''
|
||||
if (ctx.isError) {
|
||||
return ` ${c.red}[tool] ${name} error${c.reset}${durationStr}`
|
||||
}
|
||||
// In non-verbose, only surface errors
|
||||
if (event.isError) {
|
||||
const name = String(event.toolName ?? 'unknown')
|
||||
return ` [tool] ${name} ✗ error`
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
case 'cost_update': {
|
||||
const cumCost = event.cumulativeCost as Record<string, unknown> | undefined
|
||||
const costUsd = Number(cumCost?.costUsd ?? 0)
|
||||
if (costUsd > 0) {
|
||||
const tokens = event.tokens as Record<string, number> | undefined
|
||||
const inK = tokens ? (tokens.input / 1000).toFixed(1) : '?'
|
||||
const outK = tokens ? (tokens.output / 1000).toFixed(1) : '?'
|
||||
return ` [cost] $${costUsd.toFixed(4)} (${inK}k in / ${outK}k out)`
|
||||
}
|
||||
return null
|
||||
return ` ${c.dim}[tool] ${name} done${c.reset}${durationStr}`
|
||||
}
|
||||
|
||||
case 'agent_start':
|
||||
return '[agent] Session started'
|
||||
return `${c.dim}[agent] Session started${c.reset}`
|
||||
|
||||
case 'agent_end':
|
||||
return '[agent] Session ended'
|
||||
|
||||
case 'extension_ui_request':
|
||||
if (event.method === 'notify') {
|
||||
const msg = String(event.message ?? '')
|
||||
return msg ? `[gsd] ${msg}` : null
|
||||
case 'agent_end': {
|
||||
let line = `${c.dim}[agent] Session ended${c.reset}`
|
||||
if (ctx.lastCost) {
|
||||
const cost = `$${ctx.lastCost.costUsd.toFixed(4)}`
|
||||
const tokens = `${ctx.lastCost.inputTokens + ctx.lastCost.outputTokens} tokens`
|
||||
line += ` ${c.dim}(${cost}, ${tokens})${c.reset}`
|
||||
}
|
||||
// setStatus / setWidget are TUI-specific — suppress in text mode
|
||||
return line
|
||||
}
|
||||
|
||||
case 'extension_ui_request': {
|
||||
const method = String(event.method ?? '')
|
||||
|
||||
if (method === 'notify') {
|
||||
const msg = String(event.message ?? '')
|
||||
if (!msg) return null
|
||||
// Bold important notifications
|
||||
const isImportant = /^(committed:|verification gate:|milestone|blocked:)/i.test(msg)
|
||||
return isImportant
|
||||
? `${c.bold}[gsd] ${msg}${c.reset}`
|
||||
: `[gsd] ${msg}`
|
||||
}
|
||||
|
||||
if (method === 'setStatus') {
|
||||
// Parse statusKey for phase transitions
|
||||
const statusKey = String(event.statusKey ?? '')
|
||||
const msg = String(event.message ?? '')
|
||||
if (!statusKey && !msg) return null // suppress empty status lines
|
||||
// Show meaningful phase transitions
|
||||
if (statusKey) {
|
||||
const label = parsePhaseLabel(statusKey, msg)
|
||||
if (label) return `${c.cyan}[phase] ${label}${c.reset}`
|
||||
}
|
||||
// Fallback: show message if non-empty
|
||||
if (msg) return `${c.cyan}[phase] ${msg}${c.reset}`
|
||||
return null
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
default:
|
||||
return null
|
||||
|
|
@ -134,40 +267,52 @@ export function formatProgress(event: Record<string, unknown>, verbose: boolean)
|
|||
}
|
||||
|
||||
/**
|
||||
* Extract a short summary from tool arguments for display.
|
||||
* Returns null if nothing useful can be summarized.
|
||||
* Format a thinking preview line from accumulated LLM text deltas.
|
||||
*/
|
||||
function summarizeToolArgs(toolName: string, args: Record<string, unknown> | undefined): string | null {
|
||||
if (!args) return null
|
||||
export function formatThinkingLine(text: string): string {
|
||||
const trimmed = text.replace(/\s+/g, ' ').trim()
|
||||
const truncated = trimmed.length > 120 ? trimmed.slice(0, 117) + '...' : trimmed
|
||||
return `${c.dim}${c.italic}[thinking] ${truncated}${c.reset}`
|
||||
}
|
||||
|
||||
switch (toolName) {
|
||||
case 'Read':
|
||||
case 'read':
|
||||
return args.path ? String(args.path) : null
|
||||
case 'Write':
|
||||
case 'write':
|
||||
return args.path ? String(args.path) : null
|
||||
case 'Edit':
|
||||
case 'edit':
|
||||
return args.path ? String(args.path) : null
|
||||
case 'Bash':
|
||||
case 'bash': {
|
||||
const cmd = String(args.command ?? '')
|
||||
return cmd.length > 80 ? cmd.slice(0, 77) + '...' : cmd || null
|
||||
}
|
||||
case 'Grep':
|
||||
case 'grep':
|
||||
return args.pattern ? `/${args.pattern}/` + (args.path ? ` in ${args.path}` : '') : null
|
||||
case 'find':
|
||||
return args.pattern ? String(args.pattern) + (args.path ? ` in ${args.path}` : '') : null
|
||||
case 'lsp':
|
||||
return args.action ? String(args.action) + (args.symbol ? ` ${args.symbol}` : '') : null
|
||||
default: {
|
||||
// For GSD tools, show the first string arg that looks like an ID or path
|
||||
const first = Object.values(args).find(v => typeof v === 'string' && String(v).length < 80)
|
||||
return first ? String(first) : null
|
||||
/**
|
||||
* Format a cost line (used for periodic cost updates in verbose mode).
|
||||
*/
|
||||
export function formatCostLine(costUsd: number, inputTokens: number, outputTokens: number): string {
|
||||
return `${c.dim}[cost] $${costUsd.toFixed(4)} (${inputTokens + outputTokens} tokens)${c.reset}`
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Phase Label Parser
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Parse a statusKey into a human-readable phase label.
|
||||
* statusKey format varies but common patterns:
|
||||
* "milestone:M1", "slice:S1.1", "task:T1.1.1", "phase:discuss", etc.
|
||||
*/
|
||||
function parsePhaseLabel(statusKey: string, message: string): string | null {
|
||||
// Direct phase/milestone/slice/task keys
|
||||
const parts = statusKey.split(':')
|
||||
if (parts.length >= 2) {
|
||||
const [kind, value] = parts
|
||||
switch (kind.toLowerCase()) {
|
||||
case 'milestone':
|
||||
return `Milestone ${value}${message ? ' -- ' + message : ''}`
|
||||
case 'slice':
|
||||
return `Slice ${value}${message ? ' -- ' + message : ''}`
|
||||
case 'task':
|
||||
return `Task ${value}${message ? ' -- ' + message : ''}`
|
||||
case 'phase':
|
||||
return `Phase: ${value}${message ? ' -- ' + message : ''}`
|
||||
default:
|
||||
return `${kind}: ${value}${message ? ' -- ' + message : ''}`
|
||||
}
|
||||
}
|
||||
|
||||
// Single-word status keys with a message
|
||||
if (message) return `${statusKey}: ${message}`
|
||||
return statusKey || null
|
||||
}
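// e.g. parsePhaseLabel('slice:S1.1', 'research') -> 'Slice S1.1 -- research';
// parsePhaseLabel('ready', '') -> 'ready'.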
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
|
|
|||
|
|
@ -43,9 +43,10 @@ import { VALID_OUTPUT_FORMATS } from './headless-types.js'
|
|||
import {
|
||||
handleExtensionUIRequest,
|
||||
formatProgress,
|
||||
formatThinkingLine,
|
||||
startSupervisedStdinReader,
|
||||
} from './headless-ui.js'
|
||||
import type { ExtensionUIRequest } from './headless-ui.js'
|
||||
import type { ExtensionUIRequest, ProgressContext } from './headless-ui.js'
|
||||
|
||||
import {
|
||||
loadContext,
|
||||
|
|
@ -368,6 +369,11 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number):
|
|||
let cumulativeCacheWriteTokens = 0
|
||||
let lastSessionId: string | undefined
|
||||
|
||||
// Verbose text-mode state
|
||||
const toolStartTimes = new Map<string, number>()
|
||||
let lastCostData: { costUsd: number; inputTokens: number; outputTokens: number } | undefined
|
||||
let thinkingBuffer = ''
|
||||
|
||||
// Emit HeadlessJsonResult to stdout for --output-format json batch mode
|
||||
function emitBatchJsonResult(): void {
|
||||
if (options.outputFormat !== 'json') return
|
||||
|
|
@ -502,8 +508,65 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number):
|
|||
lastSessionId = String((eventObj as Record<string, unknown>).sessionId ?? '')
|
||||
}
|
||||
} else if (!options.json) {
|
||||
// Progress output to stderr
|
||||
const line = formatProgress(eventObj, !!options.verbose)
|
||||
// Progress output to stderr with verbose state tracking
|
||||
const eventType = String(eventObj.type ?? '')
|
||||
|
||||
// Track cost_update events for agent_end summary
|
||||
if (eventType === 'cost_update') {
|
||||
const data = eventObj as Record<string, unknown>
|
||||
const cumCost = data.cumulativeCost as Record<string, unknown> | undefined
|
||||
if (cumCost) {
|
||||
const tokens = data.tokens as Record<string, number> | undefined
|
||||
lastCostData = {
|
||||
costUsd: Number(cumCost.costUsd ?? 0),
|
||||
inputTokens: tokens?.input ?? 0,
|
||||
outputTokens: tokens?.output ?? 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Accumulate thinking text from message_update text_delta events
|
||||
if (eventType === 'message_update') {
|
||||
const ame = eventObj.assistantMessageEvent as Record<string, unknown> | undefined
|
||||
if (ame?.type === 'text_delta') {
|
||||
thinkingBuffer += String(ame.text ?? '')
|
||||
}
|
||||
}
|
||||
|
||||
// Track tool execution start timestamps
|
||||
if (eventType === 'tool_execution_start') {
|
||||
const toolCallId = String(eventObj.toolCallId ?? eventObj.id ?? '')
|
||||
if (toolCallId) toolStartTimes.set(toolCallId, Date.now())
|
||||
}
|
||||
|
||||
// Flush thinking buffer before tool calls or message end
|
||||
if (options.verbose && thinkingBuffer.trim() &&
|
||||
(eventType === 'tool_execution_start' || eventType === 'message_end')) {
|
||||
process.stderr.write(formatThinkingLine(thinkingBuffer) + '\n')
|
||||
thinkingBuffer = ''
|
||||
}
|
||||
|
||||
// Compute tool duration for tool_execution_end
|
||||
let toolDuration: number | undefined
|
||||
let isToolError = false
|
||||
if (eventType === 'tool_execution_end') {
|
||||
const toolCallId = String(eventObj.toolCallId ?? eventObj.id ?? '')
|
||||
const startTime = toolStartTimes.get(toolCallId)
|
||||
if (startTime) {
|
||||
toolDuration = Date.now() - startTime
|
||||
toolStartTimes.delete(toolCallId)
|
||||
}
|
||||
isToolError = eventObj.isError === true || eventObj.error != null
|
||||
}
|
||||
|
||||
const ctx: ProgressContext = {
|
||||
verbose: !!options.verbose,
|
||||
toolDuration,
|
||||
isError: isToolError,
|
||||
lastCost: eventType === 'agent_end' ? lastCostData : undefined,
|
||||
}
|
||||
|
||||
const line = formatProgress(eventObj, ctx)
|
||||
if (line) process.stderr.write(line + '\n')
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ import {
|
|||
loadManifest,
|
||||
pruneDeadProcesses,
|
||||
} from "./process-manager.js";
|
||||
import { formatUptime, resolveBgShellPersistenceCwd } from "./utilities.js";
|
||||
import { formatUptime, getBgShellLiveCwd, resolveBgShellPersistenceCwd } from "./utilities.js";
|
||||
import { formatTokenCount } from "../shared/format-utils.js";
|
||||
|
||||
import type { BgShellSharedState } from "./index.js";
|
||||
|
|
@ -213,7 +213,7 @@ export function registerBgShellLifecycle(pi: ExtensionAPI, state: BgShellSharedS
|
|||
return {
|
||||
render(width: number): string[] {
|
||||
// ── Line 1: pwd (branch) [session] ... bg status ──
|
||||
let pwd = process.cwd();
|
||||
let pwd = getBgShellLiveCwd(state.latestCtx?.cwd);
|
||||
const home = process.env.HOME || process.env.USERPROFILE;
|
||||
if (home && pwd.startsWith(home)) {
|
||||
pwd = `~${pwd.slice(home.length)}`;
|
||||
|
|
|
|||
|
|
@ -42,16 +42,51 @@ export function formatTimeAgo(timestamp: number): string {
|
|||
return formatDuration(Date.now() - timestamp) + " ago";
|
||||
}
|
||||
|
||||
function deriveProjectRootFromAutoWorktree(cachedCwd?: string): string | undefined {
|
||||
if (!cachedCwd) return undefined;
|
||||
const match = cachedCwd.match(/^(.*?)[\\/]\.gsd[\\/]worktrees[\\/][^\\/]+(?:[\\/].*)?$/);
|
||||
return match?.[1];
|
||||
}
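// e.g. deriveProjectRootFromAutoWorktree("/repo/.gsd/worktrees/M001/src") -> "/repo"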
|
||||
|
||||
export function getBgShellLiveCwd(
|
||||
cachedCwd?: string,
|
||||
pathExists: (path: string) => boolean = existsSync,
|
||||
getCwd: () => string = () => process.cwd(),
|
||||
chdir: (path: string) => void = (path) => process.chdir(path),
|
||||
): string {
|
||||
try {
|
||||
return getCwd();
|
||||
} catch {
|
||||
const projectRoot = deriveProjectRootFromAutoWorktree(cachedCwd);
|
||||
const home = process.env.HOME || process.env.USERPROFILE;
|
||||
const fallbacks = [projectRoot, cachedCwd, home, "/"].filter(
|
||||
(candidate): candidate is string => Boolean(candidate),
|
||||
);
|
||||
|
||||
for (const candidate of fallbacks) {
|
||||
if (candidate !== "/" && !pathExists(candidate)) continue;
|
||||
try {
|
||||
chdir(candidate);
|
||||
} catch {
|
||||
// Best-effort only. Returning a known-good fallback is enough to avoid crashes.
|
||||
}
|
||||
return candidate;
|
||||
}
|
||||
|
||||
return "/";
|
||||
}
|
||||
}
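// Fallback order when process.cwd() throws (deleted cwd): project root derived
// from an auto-worktree path, then the cached cwd, then $HOME, then "/".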
|
||||
|
||||
export function resolveBgShellPersistenceCwd(
|
||||
cachedCwd: string,
|
||||
liveCwd = process.cwd(),
|
||||
liveCwd: string | undefined = undefined,
|
||||
pathExists: (path: string) => boolean = existsSync,
|
||||
): string {
|
||||
const resolvedLiveCwd = liveCwd ?? getBgShellLiveCwd(cachedCwd, pathExists);
|
||||
const cachedIsAutoWorktree = /(?:^|[\\/])\.gsd[\\/]worktrees[\\/]/.test(cachedCwd);
|
||||
if (!cachedIsAutoWorktree) return cachedCwd;
|
||||
if (cachedCwd === liveCwd && pathExists(cachedCwd)) return cachedCwd;
|
||||
if (!pathExists(cachedCwd)) return liveCwd;
|
||||
if (liveCwd !== cachedCwd) return liveCwd;
|
||||
if (cachedCwd === resolvedLiveCwd && pathExists(cachedCwd)) return cachedCwd;
|
||||
if (!pathExists(cachedCwd)) return resolvedLiveCwd;
|
||||
if (resolvedLiveCwd !== cachedCwd) return resolvedLiveCwd;
|
||||
return cachedCwd;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -47,6 +47,12 @@ function shellEscapeSingle(value: string): string {
|
|||
return `'${value.replace(/'/g, `'\\''`)}'`;
|
||||
}
|
||||
|
||||
function hydrateProcessEnv(key: string, value: string): void {
|
||||
// Make newly collected secrets immediately visible to the current session.
|
||||
// Some extensions read process.env directly and do not reload .env on every call.
|
||||
process.env[key] = value;
|
||||
}
|
||||
|
||||
async function writeEnvKey(filePath: string, key: string, value: string): Promise<void> {
|
||||
let content = "";
|
||||
try {
|
||||
|
|
@ -312,6 +318,7 @@ async function applySecrets(
|
|||
try {
|
||||
await writeEnvKey(opts.envFilePath, key, value);
|
||||
applied.push(key);
|
||||
hydrateProcessEnv(key, value);
|
||||
} catch (err: any) {
|
||||
errors.push(`${key}: ${err.message}`);
|
||||
}
|
||||
|
|
@ -330,6 +337,7 @@ async function applySecrets(
|
|||
errors.push(`${key}: ${result.stderr.slice(0, 200)}`);
|
||||
} else {
|
||||
applied.push(key);
|
||||
hydrateProcessEnv(key, value);
|
||||
}
|
||||
} catch (err: any) {
|
||||
errors.push(`${key}: ${err.message}`);
|
||||
|
|
|
|||
|
|
@ -18,6 +18,26 @@ export interface ModelSelectionResult {
|
|||
routing: { tier: string; modelDowngraded: boolean } | null;
|
||||
}
|
||||
|
||||
export function resolvePreferredModelConfig(
|
||||
unitType: string,
|
||||
autoModeStartModel: { provider: string; id: string } | null,
|
||||
) {
|
||||
const explicitConfig = resolveModelWithFallbacksForUnit(unitType);
|
||||
if (explicitConfig) return explicitConfig;
|
||||
|
||||
const routingConfig = resolveDynamicRoutingConfig();
|
||||
if (!routingConfig.enabled || !routingConfig.tier_models) return undefined;
|
||||
|
||||
const ceilingModel = routingConfig.tier_models.heavy
|
||||
?? (autoModeStartModel ? `${autoModeStartModel.provider}/${autoModeStartModel.id}` : undefined);
|
||||
if (!ceilingModel) return undefined;
|
||||
|
||||
return {
|
||||
primary: ceilingModel,
|
||||
fallbacks: [],
|
||||
};
|
||||
}
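// Resolution order: an explicit per-unit-type model config wins; otherwise, when
// dynamic routing is enabled and tier models are configured, the "heavy" tier
// model (or, failing that, the auto-mode start model) acts as the routing ceiling.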
|
||||
|
||||
/**
|
||||
* Select and apply the appropriate model for a unit dispatch.
|
||||
* Handles: per-unit-type model preferences, dynamic complexity routing,
|
||||
|
|
@ -36,7 +56,7 @@ export async function selectAndApplyModel(
|
|||
autoModeStartModel: { provider: string; id: string } | null,
|
||||
retryContext?: { isRetry: boolean; previousTier?: string },
|
||||
): Promise<ModelSelectionResult> {
|
||||
const modelConfig = resolveModelWithFallbacksForUnit(unitType);
|
||||
const modelConfig = resolvePreferredModelConfig(unitType, autoModeStartModel);
|
||||
let routing: { tier: string; modelDowngraded: boolean } | null = null;
|
||||
|
||||
if (modelConfig) {
|
||||
|
|
|
|||
|
|
@ -87,6 +87,11 @@ function buildSourceFilePaths(
|
|||
paths.push(`- **Decisions**: \`${relGsdRootFile("DECISIONS")}\``);
|
||||
}
|
||||
|
||||
const queuePath = resolveGsdRootFile(base, "QUEUE");
|
||||
if (existsSync(queuePath)) {
|
||||
paths.push(`- **Queue**: \`${relGsdRootFile("QUEUE")}\``);
|
||||
}
|
||||
|
||||
const contextPath = resolveMilestoneFile(base, mid, "CONTEXT");
|
||||
if (contextPath) {
|
||||
paths.push(`- **Milestone Context**: \`${relMilestoneFile(base, mid, "CONTEXT")}\``);
|
||||
|
|
@ -915,6 +920,16 @@ export async function buildPlanMilestonePrompt(mid: string, midTitle: string, ba
|
|||
const decisionsInline = await inlineDecisionsFromDb(base, mid, undefined, inlineLevel);
|
||||
if (decisionsInline) inlined.push(decisionsInline);
|
||||
}
|
||||
const queuePath = resolveGsdRootFile(base, "QUEUE");
|
||||
if (existsSync(queuePath)) {
|
||||
const queueInline = await inlineFileSmart(
|
||||
queuePath,
|
||||
relGsdRootFile("QUEUE"),
|
||||
"Project Queue",
|
||||
`${mid} ${midTitle}`,
|
||||
);
|
||||
inlined.push(queueInline);
|
||||
}
|
||||
const knowledgeInlinePM = await inlineGsdRootFile(base, "knowledge.md", "Project Knowledge");
|
||||
if (knowledgeInlinePM) inlined.push(knowledgeInlinePM);
|
||||
inlined.push(inlineTemplate("roadmap", "Roadmap"));
|
||||
|
|
|
|||
|
|
@ -131,6 +131,15 @@ export async function bootstrapAutoSession(
|
|||
return false;
|
||||
}
|
||||
|
||||
// Capture the user's session model before guided-flow dispatch can apply a
|
||||
// phase-specific planning model for a discuss turn (#2829).
|
||||
const startModelSnapshot = ctx.model
|
||||
? {
|
||||
provider: ctx.model.provider,
|
||||
id: ctx.model.id,
|
||||
}
|
||||
: null;
|
||||
|
||||
try {
|
||||
// Validate GSD_PROJECT_ID early so the user gets immediate feedback
|
||||
const customProjectId = process.env.GSD_PROJECT_ID;
|
||||
|
|
@ -576,12 +585,11 @@ export async function bootstrapAutoSession(
|
|||
// Initialize routing history
|
||||
initRoutingHistory(s.basePath);
|
||||
|
||||
// Capture session's model at auto-mode start (#650)
|
||||
const currentModel = ctx.model;
|
||||
if (currentModel) {
|
||||
// Restore the model that was active when auto bootstrap began (#650, #2829).
|
||||
if (startModelSnapshot) {
|
||||
s.autoModeStartModel = {
|
||||
provider: currentModel.provider,
|
||||
id: currentModel.id,
|
||||
provider: startModelSnapshot.provider,
|
||||
id: startModelSnapshot.id,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -65,6 +65,8 @@ import {
|
|||
} from "./native-git-bridge.js";
|
||||
|
||||
const gsdHome = process.env.GSD_HOME || join(homedir(), ".gsd");
|
||||
const PROJECT_PREFERENCES_FILE = "PREFERENCES.md";
|
||||
const LEGACY_PROJECT_PREFERENCES_FILE = "preferences.md";
|
||||
|
||||
// ─── Shared Constants & Helpers ─────────────────────────────────────────────
|
||||
|
||||
|
|
@ -82,7 +84,7 @@ const ROOT_STATE_FILES = [
|
|||
"QUEUE.md",
|
||||
"completed-units.json",
|
||||
"metrics.json",
|
||||
// NOTE: preferences.md is intentionally NOT in ROOT_STATE_FILES.
|
||||
// NOTE: project preferences are intentionally NOT in ROOT_STATE_FILES.
|
||||
// Forward-sync (main → worktree) is handled explicitly in syncGsdStateToWorktree().
|
||||
// Back-sync (worktree → main) must NEVER overwrite the project root's copy
|
||||
// because the project root is authoritative for preferences (#2684).
|
||||
|
|
@ -196,6 +198,11 @@ export function syncProjectRootToWorktree(
|
|||
const prGsd = join(projectRoot, ".gsd");
|
||||
const wtGsd = join(worktreePath_, ".gsd");
|
||||
|
||||
// When .gsd is a symlink to the same external directory in both locations,
|
||||
// cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL).
|
||||
// Compare realpaths and skip when they resolve to the same physical path (#2184).
|
||||
if (isSamePath(prGsd, wtGsd)) return;
|
||||
|
||||
// Copy milestone directory from project root to worktree — additive only.
|
||||
// force:false prevents cpSync from overwriting existing worktree files.
|
||||
// Without this, worktree-authoritative files (e.g. VALIDATION.md written
|
||||
|
|
@ -245,6 +252,11 @@ export function syncStateToProjectRoot(
|
|||
const wtGsd = join(worktreePath_, ".gsd");
|
||||
const prGsd = join(projectRoot, ".gsd");
|
||||
|
||||
// When .gsd is a symlink to the same external directory in both locations,
|
||||
// cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL).
|
||||
// Compare realpaths and skip when they resolve to the same physical path (#2184).
|
||||
if (isSamePath(wtGsd, prGsd)) return;
|
||||
|
||||
// 1. STATE.md — the quick-glance status used by initial deriveState()
|
||||
safeCopy(join(wtGsd, "STATE.md"), join(prGsd, "STATE.md"), { force: true });
|
||||
|
||||
|
|
@ -439,18 +451,25 @@ export function syncGsdStateToWorktree(
|
|||
}
|
||||
}
|
||||
|
||||
// Forward-sync preferences.md from project root to worktree (additive only).
|
||||
// NOT in ROOT_STATE_FILES because syncWorktreeStateBack() must never overwrite
|
||||
// the project root's preferences — the project root is authoritative (#2684).
|
||||
// Forward-sync project preferences from project root to worktree (additive only).
|
||||
// Prefer the canonical uppercase file name, but keep the legacy lowercase
|
||||
// fallback so older repos still work on case-sensitive filesystems.
|
||||
{
|
||||
const src = join(mainGsd, "preferences.md");
|
||||
const dst = join(wtGsd, "preferences.md");
|
||||
if (existsSync(src) && !existsSync(dst)) {
|
||||
try {
|
||||
cpSync(src, dst);
|
||||
synced.push("preferences.md");
|
||||
} catch {
|
||||
/* non-fatal */
|
||||
const worktreeHasPreferences = existsSync(join(wtGsd, PROJECT_PREFERENCES_FILE))
|
||||
|| existsSync(join(wtGsd, LEGACY_PROJECT_PREFERENCES_FILE));
|
||||
if (!worktreeHasPreferences) {
|
||||
for (const file of [PROJECT_PREFERENCES_FILE, LEGACY_PROJECT_PREFERENCES_FILE] as const) {
|
||||
const src = join(mainGsd, file);
|
||||
const dst = join(wtGsd, file);
|
||||
if (existsSync(src)) {
|
||||
try {
|
||||
cpSync(src, dst);
|
||||
synced.push(file);
|
||||
} catch {
|
||||
/* non-fatal */
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -985,11 +1004,25 @@ function copyPlanningArtifacts(srcBase: string, wtPath: string): void {
|
|||
"STATE.md",
|
||||
"KNOWLEDGE.md",
|
||||
"OVERRIDES.md",
|
||||
"preferences.md",
|
||||
]) {
|
||||
safeCopy(join(srcGsd, file), join(dstGsd, file), { force: true });
|
||||
}
|
||||
|
||||
// Seed canonical PREFERENCES.md when available; fall back to legacy lowercase.
|
||||
if (existsSync(join(srcGsd, PROJECT_PREFERENCES_FILE))) {
|
||||
safeCopy(
|
||||
join(srcGsd, PROJECT_PREFERENCES_FILE),
|
||||
join(dstGsd, PROJECT_PREFERENCES_FILE),
|
||||
{ force: true },
|
||||
);
|
||||
} else if (existsSync(join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE))) {
|
||||
safeCopy(
|
||||
join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE),
|
||||
join(dstGsd, LEGACY_PROJECT_PREFERENCES_FILE),
|
||||
{ force: true },
|
||||
);
|
||||
}
|
||||
|
||||
// Shared WAL (R012): worktrees use the project root's DB directly.
|
||||
// No longer copy gsd.db into the worktree — the DB path resolver in
|
||||
// ensureDbOpen() detects the worktree location and opens the root DB.
|
||||
|
|
|
|||
|
|
@ -45,6 +45,17 @@ export function _resolveReportBasePath(s: Pick<AutoSession, "originalBasePath" |
|
|||
return s.originalBasePath || s.basePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the authoritative project base for dispatch guards.
|
||||
* Prior-milestone completion lives at the project root, even when the active
|
||||
* unit is running inside an auto worktree.
|
||||
*/
|
||||
export function _resolveDispatchGuardBasePath(
|
||||
s: Pick<AutoSession, "originalBasePath" | "basePath">,
|
||||
): string {
|
||||
return s.originalBasePath || s.basePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate and write an HTML milestone report snapshot.
|
||||
* Extracted from the milestone-transition block in autoLoop.
|
||||
|
|
@ -667,9 +678,10 @@ export async function runDispatch(
|
|||
prompt = preDispatchResult.prompt;
|
||||
}
|
||||
|
||||
const guardBasePath = _resolveDispatchGuardBasePath(s);
|
||||
const priorSliceBlocker = deps.getPriorSliceCompletionBlocker(
|
||||
s.basePath,
|
||||
deps.getMainBranch(s.basePath),
|
||||
guardBasePath,
|
||||
deps.getMainBranch(guardBasePath),
|
||||
unitType,
|
||||
unitId,
|
||||
);
|
||||
|
|
@ -707,8 +719,17 @@ export async function runGuards(
|
|||
const budgetCeiling = prefs?.budget_ceiling;
|
||||
if (budgetCeiling !== undefined && budgetCeiling > 0) {
|
||||
const currentLedger = deps.getLedger() as { units: unknown } | null;
|
||||
const totalCost = currentLedger
|
||||
? deps.getProjectTotals(currentLedger.units).cost
|
||||
// In parallel worker mode, only count cost from the current auto-mode session
|
||||
// to avoid hitting the ceiling due to historical project-wide spend (#2184).
|
||||
let costUnits = currentLedger?.units;
|
||||
if (process.env.GSD_PARALLEL_WORKER && s.autoStartTime && Array.isArray(costUnits)) {
|
||||
const sessionStartISO = new Date(s.autoStartTime).toISOString();
|
||||
costUnits = costUnits.filter(
|
||||
(u: { startedAt?: string }) => u.startedAt != null && u.startedAt >= sessionStartISO,
|
||||
);
|
||||
}
|
||||
const totalCost = costUnits
|
||||
? deps.getProjectTotals(costUnits).cost
|
||||
: 0;
|
||||
const budgetPct = totalCost / budgetCeiling;
|
||||
const budgetAlertLevel = deps.getBudgetAlertLevel(budgetPct);
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import { pauseAutoForProviderError } from "../provider-error-pause.js";
|
|||
import { isSessionSwitchInFlight, resolveAgentEnd } from "../auto-loop.js";
|
||||
import { resolveModelId } from "../auto-model-selection.js";
|
||||
import { clearDiscussionFlowState } from "./write-gate.js";
|
||||
import { resumeAutoAfterProviderDelay } from "./provider-error-resume.js";
|
||||
import {
|
||||
classifyError,
|
||||
createRetryState,
|
||||
|
|
@ -44,10 +45,10 @@ async function pauseTransientWithBackoff(
|
|||
retryAfterMs,
|
||||
resume: allowAutoResume
|
||||
? () => {
|
||||
pi.sendMessage(
|
||||
{ customType: "gsd-auto-timeout-recovery", content: "Continue execution — provider error recovery delay elapsed.", display: false },
|
||||
{ triggerTurn: true },
|
||||
);
|
||||
void resumeAutoAfterProviderDelay(pi, ctx).catch((err) => {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
ctx.ui.notify(`Provider error recovery delay elapsed, but auto-mode failed to resume: ${message}`, "error");
|
||||
});
|
||||
}
|
||||
: undefined,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -0,0 +1,53 @@
import type {
  ExtensionAPI,
  ExtensionCommandContext,
  ExtensionContext,
} from "@gsd/pi-coding-agent";

import { getAutoDashboardData, startAuto, type AutoDashboardData } from "../auto.js";

type AutoResumeSnapshot = Pick<AutoDashboardData, "active" | "paused" | "stepMode" | "basePath">;

export interface ProviderErrorResumeDeps {
  getSnapshot(): AutoResumeSnapshot;
  startAuto(
    ctx: ExtensionCommandContext,
    pi: ExtensionAPI,
    base: string,
    verboseMode: boolean,
    options?: { step?: boolean },
  ): Promise<void>;
}

const defaultDeps: ProviderErrorResumeDeps = {
  getSnapshot: () => getAutoDashboardData(),
  startAuto,
};

export async function resumeAutoAfterProviderDelay(
  pi: ExtensionAPI,
  ctx: ExtensionContext,
  deps: ProviderErrorResumeDeps = defaultDeps,
): Promise<"resumed" | "already-active" | "not-paused" | "missing-base"> {
  const snapshot = deps.getSnapshot();

  if (snapshot.active) return "already-active";
  if (!snapshot.paused) return "not-paused";

  if (!snapshot.basePath) {
    ctx.ui.notify(
      "Provider error recovery delay elapsed, but no paused auto-mode base path was available. Leaving auto-mode paused.",
      "warning",
    );
    return "missing-base";
  }

  await deps.startAuto(
    ctx as ExtensionCommandContext,
    pi,
    snapshot.basePath,
    false,
    { step: snapshot.stepMode },
  );
  return "resumed";
}
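A rough usage sketch (assumed test doubles, not code from this commit) showing how the injected `deps` parameter drives each return value:

```ts
// Import path as in this diff; the doubles below are hypothetical and only provide
// the members resumeAutoAfterProviderDelay actually touches.
import { resumeAutoAfterProviderDelay } from "./provider-error-resume.js";

const fakePi = {} as any;
const fakeCtx = { ui: { notify: (_msg: string, _level: string) => {} } } as any;

const outcome = await resumeAutoAfterProviderDelay(fakePi, fakeCtx, {
  // Paused session with a known base path: the function should call startAuto and resume.
  getSnapshot: () => ({ active: false, paused: true, stepMode: false, basePath: "/tmp/project" }),
  startAuto: async () => {}, // a real test would record this call
});
// outcome === "resumed"; with active: true it would be "already-active", and so on.
```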
@ -9,14 +9,28 @@ import { registerJournalTools } from "./journal-tools.js";
 import { registerHooks } from "./register-hooks.js";
 import { registerShortcuts } from "./register-shortcuts.js";
 
+export function handleRecoverableExtensionProcessError(err: Error): boolean {
+  if ((err as NodeJS.ErrnoException).code === "EPIPE") {
+    process.exit(0);
+  }
+  if ((err as NodeJS.ErrnoException).code === "ENOENT") {
+    const syscall = (err as NodeJS.ErrnoException).syscall;
+    if (syscall?.startsWith("spawn")) {
+      process.stderr.write(`[gsd] spawn ENOENT: ${(err as any).path ?? "unknown"} — command not found\n`);
+      return true;
+    }
+    if (syscall === "uv_cwd") {
+      process.stderr.write(`[gsd] ENOENT (${syscall}): ${err.message}\n`);
+      return true;
+    }
+  }
+  return false;
+}
+
 function installEpipeGuard(): void {
   if (!process.listeners("uncaughtException").some((listener) => listener.name === "_gsdEpipeGuard")) {
     const _gsdEpipeGuard = (err: Error): void => {
-      if ((err as NodeJS.ErrnoException).code === "EPIPE") {
-        process.exit(0);
-      }
-      if ((err as NodeJS.ErrnoException).code === "ENOENT" && (err as any).syscall?.startsWith("spawn")) {
-        process.stderr.write(`[gsd] spawn ENOENT: ${(err as any).path ?? "unknown"} — command not found\n`);
+      if (handleRecoverableExtensionProcessError(err)) {
+        return;
+      }
       throw err;
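As an illustrative check (an assumption, not part of the commit), the extracted helper can be exercised directly, which is presumably why the inline guard body was factored out:

```ts
// spawn ENOENT is reported to stderr and treated as recoverable (returns true);
// unrelated errors fall through (returns false) so the guard can rethrow them.
const spawnErr = Object.assign(new Error("spawn foo ENOENT"), {
  code: "ENOENT",
  syscall: "spawn foo",
  path: "foo",
});
console.assert(handleRecoverableExtensionProcessError(spawnErr) === true);

const otherErr = Object.assign(new Error("boom"), { code: "EACCES" });
console.assert(handleRecoverableExtensionProcessError(otherErr) === false);
```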
@ -45,4 +59,3 @@ export function registerGsdExtension(pi: ExtensionAPI): void {
   registerShortcuts(pi);
   registerHooks(pi);
 }
@ -5,6 +5,7 @@ import type { ExtensionAPI } from "@gsd/pi-coding-agent";
|
|||
import { Key } from "@gsd/pi-tui";
|
||||
|
||||
import { GSDDashboardOverlay } from "../dashboard-overlay.js";
|
||||
import { ParallelMonitorOverlay } from "../parallel-monitor-overlay.js";
|
||||
import { shortcutDesc } from "../../shared/mod.js";
|
||||
|
||||
export function registerShortcuts(pi: ExtensionAPI): void {
|
||||
|
|
@ -29,4 +30,27 @@ export function registerShortcuts(pi: ExtensionAPI): void {
|
|||
);
|
||||
},
|
||||
});
|
||||
|
||||
pi.registerShortcut(Key.ctrlAlt("p"), {
|
||||
description: shortcutDesc("Open parallel worker monitor", "/gsd parallel watch"),
|
||||
handler: async (ctx) => {
|
||||
const parallelDir = join(process.cwd(), ".gsd", "parallel");
|
||||
if (!existsSync(parallelDir)) {
|
||||
ctx.ui.notify("No parallel workers found. Run /gsd parallel start first.", "info");
|
||||
return;
|
||||
}
|
||||
await ctx.ui.custom<void>(
|
||||
(tui, theme, _kb, done) => new ParallelMonitorOverlay(tui, theme, () => done()),
|
||||
{
|
||||
overlay: true,
|
||||
overlayOptions: {
|
||||
width: "90%",
|
||||
minWidth: 80,
|
||||
maxHeight: "92%",
|
||||
anchor: "center",
|
||||
},
|
||||
},
|
||||
);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [
|
|||
{ cmd: "inspect", desc: "Show SQLite DB diagnostics" },
|
||||
{ cmd: "knowledge", desc: "Add persistent project knowledge (rule, pattern, or lesson)" },
|
||||
{ cmd: "new-milestone", desc: "Create a milestone from a specification document (headless)" },
|
||||
{ cmd: "parallel", desc: "Parallel milestone orchestration (start, status, stop, merge)" },
|
||||
{ cmd: "parallel", desc: "Parallel milestone orchestration (start, status, stop, merge, watch)" },
|
||||
{ cmd: "cmux", desc: "Manage cmux integration (status, sidebar, notifications, splits)" },
|
||||
{ cmd: "park", desc: "Park a milestone — skip without deleting" },
|
||||
{ cmd: "unpark", desc: "Reactivate a parked milestone" },
|
||||
|
|
@ -100,6 +100,7 @@ const NESTED_COMPLETIONS: CompletionMap = {
|
|||
{ cmd: "pause", desc: "Pause a specific worker" },
|
||||
{ cmd: "resume", desc: "Resume a paused worker" },
|
||||
{ cmd: "merge", desc: "Merge completed milestone branches" },
|
||||
{ cmd: "watch", desc: "Live TUI dashboard monitoring all workers" },
|
||||
],
|
||||
setup: [
|
||||
{ cmd: "llm", desc: "Configure LLM provider settings" },
|
||||
|
|
|
|||
|
|
@ -111,7 +111,25 @@ export async function handleParallelCommand(trimmed: string, _ctx: ExtensionComm
     return true;
   }
 
-  emitParallelMessage(pi, `Unknown parallel subcommand "${subcommand}". Usage: /gsd parallel [start|status|stop|pause|resume|merge]`);
+  if (subcommand === "watch") {
+    const root = projectRoot();
+    const { ParallelMonitorOverlay } = await import("../../parallel-monitor-overlay.js");
+    await _ctx.ui.custom<void>(
+      (tui, theme, _kb, done) => new ParallelMonitorOverlay(tui, theme, () => done(), root),
+      {
+        overlay: true,
+        overlayOptions: {
+          width: "90%",
+          minWidth: 80,
+          maxHeight: "92%",
+          anchor: "center",
+        },
+      },
+    );
+    return true;
+  }
+
+  emitParallelMessage(pi, `Unknown parallel subcommand "${subcommand}". Usage: /gsd parallel [start|status|stop|pause|resume|merge|watch]`);
   return true;
 }
@ -14,8 +14,7 @@ import { readFileSync, unlinkSync, existsSync } from "node:fs";
 import { join } from "node:path";
 import { gsdRoot } from "./paths.js";
 import { atomicWriteSync } from "./atomic-write.js";
-
-const LOCK_FILE = "auto.lock";
+import { effectiveLockFile } from "./session-lock.js";
 
 export interface LockData {
   pid: number;

@ -28,7 +27,7 @@
 }
 
 function lockPath(basePath: string): string {
-  return join(gsdRoot(basePath), LOCK_FILE);
+  return join(gsdRoot(basePath), effectiveLockFile());
 }
 
 /** Write or update the lock file with current auto-mode state. */
src/resources/extensions/gsd/parallel-monitor-overlay.ts (new file, 497 lines)
@ -0,0 +1,497 @@
|
|||
/**
|
||||
* GSD Parallel Monitor Overlay
|
||||
*
|
||||
* Full-screen TUI overlay showing real-time parallel worker progress.
|
||||
* Opened via `/gsd parallel watch` or Ctrl+Alt+P.
|
||||
* Reads the same data sources as `scripts/parallel-monitor.mjs` but
|
||||
* renders as a native pi-tui overlay with theme integration.
|
||||
*/
|
||||
|
||||
import { existsSync, statSync, readFileSync, openSync, readSync, closeSync, readdirSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { spawnSync } from "node:child_process";
|
||||
|
||||
import type { Theme } from "@gsd/pi-coding-agent";
|
||||
import { truncateToWidth, visibleWidth, matchesKey, Key } from "@gsd/pi-tui";
|
||||
|
||||
import { formatDuration, STATUS_GLYPH, STATUS_COLOR } from "../shared/mod.js";
|
||||
|
||||
// ─── Types ────────────────────────────────────────────────────────────────
|
||||
|
||||
interface StatusJson {
|
||||
milestoneId: string;
|
||||
pid: number;
|
||||
state: string;
|
||||
cost: number;
|
||||
lastHeartbeat: number;
|
||||
startedAt: number;
|
||||
worktreePath: string;
|
||||
}
|
||||
|
||||
interface AutoLock {
|
||||
pid: number;
|
||||
startedAt: string;
|
||||
unitType: string;
|
||||
unitId: string;
|
||||
unitStartedAt: string;
|
||||
}
|
||||
|
||||
interface SliceProgress {
|
||||
id: string;
|
||||
status: string;
|
||||
total: number;
|
||||
done: number;
|
||||
}
|
||||
|
||||
interface WorkerView {
|
||||
mid: string;
|
||||
pid: number;
|
||||
alive: boolean;
|
||||
state: string;
|
||||
cost: number;
|
||||
heartbeatAge: number;
|
||||
currentUnit: string | null;
|
||||
unitType: string | null;
|
||||
unitElapsed: number;
|
||||
elapsed: number;
|
||||
totalTasks: number;
|
||||
doneTasks: number;
|
||||
totalSlices: number;
|
||||
doneSlices: number;
|
||||
slices: SliceProgress[];
|
||||
errors: string[];
|
||||
}
|
||||
|
||||
// ─── Data Helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
function readJsonSafe<T>(filePath: string): T | null {
|
||||
try {
|
||||
return JSON.parse(readFileSync(filePath, "utf-8")) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function isPidAlive(pid: number): boolean {
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function tailRead(filePath: string, maxBytes: number): string {
|
||||
try {
|
||||
const stat = statSync(filePath);
|
||||
const readSize = Math.min(stat.size, maxBytes);
|
||||
const fd = openSync(filePath, "r");
|
||||
const buf = Buffer.alloc(readSize);
|
||||
readSync(fd, buf, 0, readSize, Math.max(0, stat.size - readSize));
|
||||
closeSync(fd);
|
||||
return buf.toString("utf-8");
|
||||
} catch {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
function discoverWorkers(basePath: string): string[] {
|
||||
const parallelDir = join(basePath, ".gsd", "parallel");
|
||||
const worktreeDir = join(basePath, ".gsd", "worktrees");
|
||||
const mids = new Set<string>();
|
||||
|
||||
if (existsSync(parallelDir)) {
|
||||
try {
|
||||
for (const f of readdirSync(parallelDir)) {
|
||||
if (f.endsWith(".status.json")) mids.add(f.replace(".status.json", ""));
|
||||
const m = f.match(/^(M\d+)\.(stderr|stdout)\.log$/);
|
||||
if (m) mids.add(m[1]);
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
|
||||
if (existsSync(worktreeDir)) {
|
||||
try {
|
||||
for (const d of readdirSync(worktreeDir)) {
|
||||
if (d.startsWith("M") && existsSync(join(worktreeDir, d, ".gsd", "auto.lock"))) {
|
||||
mids.add(d);
|
||||
}
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
|
||||
return [...mids].sort();
|
||||
}
|
||||
|
||||
function querySliceProgress(basePath: string, mid: string): SliceProgress[] {
|
||||
const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "gsd.db");
|
||||
if (!existsSync(dbPath)) return [];
|
||||
|
||||
try {
|
||||
const sql = `SELECT s.id, s.status, COUNT(t.id), SUM(CASE WHEN t.status='complete' THEN 1 ELSE 0 END) FROM slices s LEFT JOIN tasks t ON s.milestone_id=t.milestone_id AND s.id=t.slice_id WHERE s.milestone_id='${mid}' GROUP BY s.id ORDER BY s.id`;
|
||||
const result = spawnSync("sqlite3", [dbPath, sql], { timeout: 3000, encoding: "utf-8" });
|
||||
const out = (result.stdout || "").trim();
|
||||
if (!out || result.status !== 0) return [];
|
||||
return out.split("\n").map((line) => {
|
||||
const [id, status, total, done] = line.split("|");
|
||||
return { id, status, total: parseInt(total, 10), done: parseInt(done || "0", 10) };
|
||||
});
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function extractCostFromNdjson(basePath: string, mid: string): number {
|
||||
const stdoutPath = join(basePath, ".gsd", "parallel", `${mid}.stdout.log`);
|
||||
if (!existsSync(stdoutPath)) return 0;
|
||||
try {
|
||||
const content = readFileSync(stdoutPath, "utf-8");
|
||||
let total = 0;
|
||||
for (const line of content.split("\n")) {
|
||||
if (!line.includes("message_end")) continue;
|
||||
try {
|
||||
const obj = JSON.parse(line);
|
||||
if (obj.type === "message_end") {
|
||||
const cost = obj.message?.usage?.cost?.total;
|
||||
if (typeof cost === "number") total += cost;
|
||||
}
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
return total;
|
||||
} catch {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function queryRecentCompletions(basePath: string, mid: string): string[] {
|
||||
const dbPath = join(basePath, ".gsd", "worktrees", mid, ".gsd", "gsd.db");
|
||||
if (!existsSync(dbPath)) return [];
|
||||
try {
|
||||
const sql = `SELECT id, slice_id, one_liner FROM tasks WHERE milestone_id='${mid}' AND status='complete' AND completed_at IS NOT NULL ORDER BY completed_at DESC LIMIT 5`;
|
||||
const result = spawnSync("sqlite3", [dbPath, sql], { timeout: 3000, encoding: "utf-8" });
|
||||
const out = (result.stdout || "").trim();
|
||||
if (!out || result.status !== 0) return [];
|
||||
return out.split("\n").map((line) => {
|
||||
const [taskId, sliceId, oneLiner] = line.split("|");
|
||||
return `✓ ${mid}/${sliceId}/${taskId}${oneLiner ? ": " + oneLiner : ""}`;
|
||||
});
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function collectWorkerData(basePath: string): WorkerView[] {
|
||||
const mids = discoverWorkers(basePath);
|
||||
const parallelDir = join(basePath, ".gsd", "parallel");
|
||||
const workers: WorkerView[] = [];
|
||||
|
||||
for (const mid of mids) {
|
||||
const status = readJsonSafe<StatusJson>(join(parallelDir, `${mid}.status.json`));
|
||||
const lock = readJsonSafe<AutoLock>(join(basePath, ".gsd", "worktrees", mid, ".gsd", "auto.lock"));
|
||||
const slices = querySliceProgress(basePath, mid);
|
||||
|
||||
const pid = lock?.pid || status?.pid || 0;
|
||||
const alive = pid ? isPidAlive(pid) : false;
|
||||
|
||||
// Heartbeat: prefer status.json if PID matches, else use file mtime
|
||||
let heartbeatAge = Infinity;
|
||||
const statusPidMatches = status?.pid === pid && status?.lastHeartbeat;
|
||||
if (statusPidMatches) {
|
||||
heartbeatAge = Date.now() - status!.lastHeartbeat;
|
||||
} else {
|
||||
const mtimes: number[] = [];
|
||||
const stdoutLog = join(parallelDir, `${mid}.stdout.log`);
|
||||
const stderrLog = join(parallelDir, `${mid}.stderr.log`);
|
||||
if (existsSync(stdoutLog)) mtimes.push(statSync(stdoutLog).mtimeMs);
|
||||
if (existsSync(stderrLog)) mtimes.push(statSync(stderrLog).mtimeMs);
|
||||
if (lock?.unitStartedAt) mtimes.push(new Date(lock.unitStartedAt).getTime());
|
||||
if (mtimes.length > 0) heartbeatAge = Date.now() - Math.max(...mtimes);
|
||||
}
|
||||
|
||||
let cost = status?.cost || 0;
|
||||
if (cost === 0) cost = extractCostFromNdjson(basePath, mid);
|
||||
|
||||
const totalTasks = slices.reduce((sum, s) => sum + s.total, 0);
|
||||
const doneTasks = slices.reduce((sum, s) => sum + s.done, 0);
|
||||
const doneSlices = slices.filter((s) => s.status === "complete").length;
|
||||
|
||||
const elapsed = status?.startedAt
|
||||
? Date.now() - status.startedAt
|
||||
: lock?.startedAt
|
||||
? Date.now() - new Date(lock.startedAt).getTime()
|
||||
: 0;
|
||||
|
||||
// Errors from stderr (last 4KB, only new content)
|
||||
const errors: string[] = [];
|
||||
const stderrLog = join(parallelDir, `${mid}.stderr.log`);
|
||||
if (existsSync(stderrLog)) {
|
||||
const content = tailRead(stderrLog, 4096);
|
||||
for (const line of content.trim().split("\n").slice(-5)) {
|
||||
if (line.includes("error") || line.includes("Error") || line.includes("exited")) {
|
||||
errors.push(line.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
workers.push({
|
||||
mid,
|
||||
pid,
|
||||
alive,
|
||||
state: alive ? "running" : (status?.state || "dead"),
|
||||
cost,
|
||||
heartbeatAge,
|
||||
currentUnit: lock?.unitId || null,
|
||||
unitType: lock?.unitType || null,
|
||||
unitElapsed: lock?.unitStartedAt ? Date.now() - new Date(lock.unitStartedAt).getTime() : 0,
|
||||
elapsed,
|
||||
totalTasks,
|
||||
doneTasks,
|
||||
totalSlices: slices.length,
|
||||
doneSlices,
|
||||
slices,
|
||||
errors,
|
||||
});
|
||||
}
|
||||
|
||||
return workers;
|
||||
}
|
||||
|
||||
// ─── Rendering Helpers ────────────────────────────────────────────────────
|
||||
|
||||
function unitTypeLabel(unitType: string | null): string {
|
||||
const labels: Record<string, string> = {
|
||||
"execute-task": "EXEC",
|
||||
"research-slice": "RSRCH",
|
||||
"plan-slice": "PLAN",
|
||||
"complete-slice": "DONE",
|
||||
"complete-task": "DONE",
|
||||
"reassess": "ASSESS",
|
||||
"validate": "VALID",
|
||||
"reassess-roadmap": "ASSESS",
|
||||
};
|
||||
return labels[unitType || ""] || (unitType || "---").toUpperCase().slice(0, 5);
|
||||
}
|
||||
|
||||
function progressBar(done: number, total: number, width: number): string {
|
||||
if (total === 0) return "░".repeat(width);
|
||||
const filled = Math.round((done / total) * width);
|
||||
return "█".repeat(filled) + "░".repeat(width - filled);
|
||||
}
|
||||
|
||||
function healthGlyph(alive: boolean, heartbeatAge: number): string {
|
||||
if (!alive) return "○";
|
||||
return "●";
|
||||
}
|
||||
|
||||
// ─── Overlay Class ────────────────────────────────────────────────────────
|
||||
|
||||
export class ParallelMonitorOverlay {
|
||||
private tui: { requestRender: () => void };
|
||||
private theme: Theme;
|
||||
private onClose: () => void;
|
||||
private basePath: string;
|
||||
private refreshTimer: ReturnType<typeof setInterval>;
|
||||
private workers: WorkerView[] = [];
|
||||
private events: string[] = [];
|
||||
private cachedLines?: string[];
|
||||
private scrollOffset = 0;
|
||||
private disposed = false;
|
||||
private resizeHandler: (() => void) | null = null;
|
||||
|
||||
constructor(
|
||||
tui: { requestRender: () => void },
|
||||
theme: Theme,
|
||||
onClose: () => void,
|
||||
basePath?: string,
|
||||
) {
|
||||
this.tui = tui;
|
||||
this.theme = theme;
|
||||
this.onClose = onClose;
|
||||
this.basePath = basePath || process.cwd();
|
||||
|
||||
this.resizeHandler = () => {
|
||||
if (this.disposed) return;
|
||||
this.invalidate();
|
||||
this.tui.requestRender();
|
||||
};
|
||||
process.stdout.on("resize", this.resizeHandler);
|
||||
|
||||
this.refresh();
|
||||
this.refreshTimer = setInterval(() => this.refresh(), 5000);
|
||||
}
|
||||
|
||||
private refresh(): void {
|
||||
if (this.disposed) return;
|
||||
this.workers = collectWorkerData(this.basePath);
|
||||
|
||||
// Collect completion events
|
||||
for (const wk of this.workers) {
|
||||
const completions = queryRecentCompletions(this.basePath, wk.mid);
|
||||
for (const evt of completions) {
|
||||
if (!this.events.includes(evt)) this.events.push(evt);
|
||||
}
|
||||
}
|
||||
this.events = this.events.slice(-10);
|
||||
|
||||
this.cachedLines = undefined;
|
||||
this.tui.requestRender();
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.disposed = true;
|
||||
clearInterval(this.refreshTimer);
|
||||
if (this.resizeHandler) {
|
||||
process.stdout.removeListener("resize", this.resizeHandler);
|
||||
this.resizeHandler = null;
|
||||
}
|
||||
}
|
||||
|
||||
handleInput(data: string): void {
|
||||
if (matchesKey(data, Key.escape) || data === "q") {
|
||||
this.dispose();
|
||||
this.onClose();
|
||||
return;
|
||||
}
|
||||
if (matchesKey(data, Key.down) || data === "j") {
|
||||
this.scrollOffset++;
|
||||
this.invalidate();
|
||||
this.tui.requestRender();
|
||||
return;
|
||||
}
|
||||
if (matchesKey(data, Key.up) || data === "k") {
|
||||
this.scrollOffset = Math.max(0, this.scrollOffset - 1);
|
||||
this.invalidate();
|
||||
this.tui.requestRender();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
invalidate(): void {
|
||||
this.cachedLines = undefined;
|
||||
}
|
||||
|
||||
render(width: number): string[] {
|
||||
if (this.cachedLines) return this.cachedLines;
|
||||
|
||||
const t = this.theme;
|
||||
const lines: string[] = [];
|
||||
const w = Math.max(width, 60);
|
||||
|
||||
// Header
|
||||
const totalCost = this.workers.reduce((s, wk) => s + wk.cost, 0);
|
||||
const aliveCount = this.workers.filter((wk) => wk.alive).length;
|
||||
const now = new Date().toLocaleTimeString();
|
||||
|
||||
lines.push(t.bold(t.fg("accent", " GSD Parallel Monitor ")));
|
||||
lines.push(
|
||||
t.fg("muted", ` ${now} │ ${aliveCount}/${this.workers.length} alive │ Total: `) +
|
||||
t.bold(`$${totalCost.toFixed(2)}`) +
|
||||
t.fg("muted", " │ 5s refresh"),
|
||||
);
|
||||
lines.push(t.fg("muted", "─".repeat(w)));
|
||||
|
||||
if (this.workers.length === 0) {
|
||||
lines.push("");
|
||||
lines.push(t.fg("warning", " No parallel workers found."));
|
||||
lines.push(t.fg("muted", " Run /gsd parallel start to begin."));
|
||||
} else {
|
||||
for (const wk of this.workers) {
|
||||
lines.push("");
|
||||
|
||||
// Health + ID + state
|
||||
const healthColor = wk.alive ? "success" : "error";
|
||||
const glyph = healthGlyph(wk.alive, wk.heartbeatAge);
|
||||
const stateText = wk.alive
|
||||
? t.fg("success", "RUNNING")
|
||||
: t.fg("error", t.bold("DEAD"));
|
||||
const heartbeatText = wk.heartbeatAge === Infinity
|
||||
? "never"
|
||||
: formatDuration(wk.heartbeatAge) + " ago";
|
||||
|
||||
lines.push(
|
||||
` ${t.fg(healthColor, glyph)} ${t.bold(wk.mid)} ${stateText} ` +
|
||||
t.fg("muted", `PID ${wk.pid} │ elapsed ${formatDuration(wk.elapsed)} │ `) +
|
||||
`cost ${t.bold("$" + wk.cost.toFixed(2))} ` +
|
||||
t.fg("muted", "│ heartbeat ") + t.fg(healthColor, heartbeatText),
|
||||
);
|
||||
|
||||
// Current unit
|
||||
if (wk.currentUnit) {
|
||||
const phaseColor =
|
||||
wk.unitType === "execute-task" ? "accent"
|
||||
: wk.unitType === "research-slice" ? "warning"
|
||||
: wk.unitType?.includes("complete") ? "success"
|
||||
: "text";
|
||||
lines.push(
|
||||
` ${t.fg("muted", "▸")} ${t.fg(phaseColor, unitTypeLabel(wk.unitType))} ${wk.currentUnit} ` +
|
||||
t.fg("muted", `(${formatDuration(wk.unitElapsed)})`),
|
||||
);
|
||||
} else if (!wk.alive) {
|
||||
lines.push(` ${t.fg("muted", "▸")} ${t.fg("error", "stopped")}`);
|
||||
} else {
|
||||
lines.push(` ${t.fg("muted", "▸ idle / between units")}`);
|
||||
}
|
||||
|
||||
// Slice progress chips
|
||||
if (wk.slices.length > 0) {
|
||||
const chips = wk.slices.map((s) => {
|
||||
const pct = s.total > 0 ? s.done / s.total : 0;
|
||||
const color = s.status === "complete" ? "success" : pct > 0 ? "warning" : "muted";
|
||||
return t.fg(color, `${s.id}:${s.done}/${s.total}`);
|
||||
});
|
||||
lines.push(` ${t.fg("muted", "slices")} ${chips.join(" ")}`);
|
||||
|
||||
// Task progress bar
|
||||
const bar = progressBar(wk.doneTasks, wk.totalTasks, 25);
|
||||
const pct = wk.totalTasks > 0 ? Math.round((wk.doneTasks / wk.totalTasks) * 100) : 0;
|
||||
lines.push(
|
||||
` ${t.fg("muted", "tasks")} ${t.fg("success", bar)} ${wk.doneTasks}/${wk.totalTasks} ` +
|
||||
t.fg("muted", `(${pct}%) │ slices done ${wk.doneSlices}/${wk.totalSlices}`),
|
||||
);
|
||||
}
|
||||
|
||||
// Errors
|
||||
for (const err of wk.errors.slice(-2)) {
|
||||
const truncated = err.length > w - 10 ? err.slice(0, w - 11) + "…" : err;
|
||||
lines.push(` ${t.fg("error", "⚠ " + truncated)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Event feed
|
||||
lines.push("");
|
||||
lines.push(t.fg("muted", "─".repeat(w)));
|
||||
lines.push(` ${t.bold("Recent Events")}`);
|
||||
|
||||
if (this.events.length === 0) {
|
||||
lines.push(t.fg("muted", " No events yet..."));
|
||||
} else {
|
||||
for (const evt of this.events.slice(-8)) {
|
||||
const mid = evt.match(/^✓ (M\d+)\//)?.[1] || "";
|
||||
const truncated = evt.length > w - 10 ? evt.slice(0, w - 11) + "…" : evt;
|
||||
lines.push(` ${t.fg("muted", "│")} ${t.fg("accent", mid)} ${truncated.replace(/^✓ M\d+\//, "")}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Footer
|
||||
lines.push("");
|
||||
const allDone = this.workers.length > 0 && this.workers.every((wk) => !wk.alive);
|
||||
if (allDone) {
|
||||
lines.push(t.bold(t.fg("success", " ALL WORKERS COMPLETE")));
|
||||
for (const wk of this.workers) {
|
||||
lines.push(
|
||||
` ${wk.mid} $${wk.cost.toFixed(2)} │ ${wk.doneSlices}/${wk.totalSlices} slices ` +
|
||||
`${wk.doneTasks}/${wk.totalTasks} tasks │ ${formatDuration(wk.elapsed)}`,
|
||||
);
|
||||
}
|
||||
lines.push(` ${t.bold("Total: $" + this.workers.reduce((s, wk) => s + wk.cost, 0).toFixed(2))}`);
|
||||
}
|
||||
lines.push(t.fg("muted", " ESC/q to close │ ↑↓ scroll"));
|
||||
|
||||
// Apply scroll — use terminal rows as height estimate
|
||||
const termHeight = process.stdout.rows || 40;
|
||||
const visible = lines.slice(this.scrollOffset, this.scrollOffset + termHeight);
|
||||
this.cachedLines = visible;
|
||||
return visible;
|
||||
}
|
||||
}
|
||||
|
|
@ -21,7 +21,7 @@ import { join, dirname } from "node:path";
 import { fileURLToPath } from "node:url";
 import { gsdRoot } from "./paths.js";
 import { createWorktree, worktreePath } from "./worktree-manager.js";
-import { autoWorktreeBranch, runWorktreePostCreateHook } from "./auto-worktree.js";
+import { autoWorktreeBranch, runWorktreePostCreateHook, syncGsdStateToWorktree } from "./auto-worktree.js";
 import { nativeBranchExists } from "./native-git-bridge.js";
 import { readIntegrationBranch } from "./git-service.js";
 import { resolveParallelConfig } from "./preferences.js";

@ -507,6 +507,11 @@ function createMilestoneWorktree(basePath: string, milestoneId: string): string
   // Run post-create hook if configured
   runWorktreePostCreateHook(basePath, info.path);
 
+  // Copy .gsd/ planning artifacts (milestones, CONTEXT, ROADMAP, etc.) from the
+  // project root into the worktree. Without this, workers for newly-planned
+  // milestones can't find their roadmap and exit immediately (#2184 Bug 4).
+  syncGsdStateToWorktree(basePath, info.path);
+
   return info.path;
 }
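For orientation, a rough sketch of what a sync like this could look like; the real `syncGsdStateToWorktree` in auto-worktree.ts may copy a different set of artifacts or use a different mechanism:

```ts
import { cpSync, existsSync } from "node:fs";
import { join } from "node:path";

// Illustrative only: copy the planning artifacts named in the comment above
// from the project root's .gsd/ into the worktree's .gsd/.
function syncGsdStateSketch(basePath: string, worktreePath: string): void {
  for (const entry of ["milestones", "CONTEXT.md", "ROADMAP.md", "PREFERENCES.md"]) {
    const src = join(basePath, ".gsd", entry);
    if (existsSync(src)) {
      cpSync(src, join(worktreePath, ".gsd", entry), { recursive: true });
    }
  }
}
```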
@ -83,10 +83,31 @@ let _lockAcquiredAt: number = 0;
 
 const LOCK_FILE = "auto.lock";
 
+/**
+ * Derive the effective lock file name for the current process.
+ * In parallel worker mode (GSD_PARALLEL_WORKER + GSD_MILESTONE_LOCK),
+ * each worker uses a per-milestone lock file (`auto-<milestoneId>.lock`)
+ * to avoid contending on the shared `.gsd/auto.lock` (#2184).
+ */
+export function effectiveLockFile(): string {
+  const mid = process.env.GSD_PARALLEL_WORKER ? process.env.GSD_MILESTONE_LOCK : null;
+  return mid ? `auto-${mid}.lock` : LOCK_FILE;
+}
+
+/**
+ * Derive the OS-level lock target directory for the current process.
+ * In parallel worker mode, uses `.gsd/parallel/<milestoneId>/` instead of
+ * `.gsd/` so workers don't contend on the same proper-lockfile directory (#2184).
+ */
+export function effectiveLockTarget(gsdDir: string): string {
+  const mid = process.env.GSD_PARALLEL_WORKER ? process.env.GSD_MILESTONE_LOCK : null;
+  return mid ? join(gsdDir, "parallel", mid) : gsdDir;
+}
+
 function lockPath(basePath: string): string {
   // If we have a snapshotted path from acquisition, use it for consistency
   if (_snapshotLockPath) return _snapshotLockPath;
-  return join(gsdRoot(basePath), LOCK_FILE);
+  return join(gsdRoot(basePath), effectiveLockFile());
 }
 
 // ─── Stray Lock Cleanup ─────────────────────────────────────────────────────
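For illustration, a short sketch (hypothetical values noted in comments) of the paths these helpers resolve in normal vs. parallel-worker mode:

```ts
// Normal session: no parallel env vars set.
// effectiveLockFile()            -> "auto.lock"
// effectiveLockTarget("/p/.gsd") -> "/p/.gsd"

// Parallel worker for a hypothetical milestone M002:
process.env.GSD_PARALLEL_WORKER = "1";
process.env.GSD_MILESTONE_LOCK = "M002";
// effectiveLockFile()            -> "auto-M002.lock"
// effectiveLockTarget("/p/.gsd") -> "/p/.gsd/parallel/M002"
```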
@ -265,14 +286,16 @@ export function acquireSessionLock(basePath: string): SessionLockResult {
   }
 
   const gsdDir = gsdRoot(basePath);
+  const lockTarget = effectiveLockTarget(gsdDir);
 
   try {
-    // Try to acquire an exclusive OS-level lock on the lock file.
-    // We lock the directory (gsdRoot) since proper-lockfile works best
-    // on directories, and the lock file itself may not exist yet.
-    mkdirSync(gsdDir, { recursive: true });
+    // Try to acquire an exclusive OS-level lock on the lock target.
+    // We lock a directory since proper-lockfile works best on directories,
+    // and the lock file itself may not exist yet.
+    // In parallel worker mode, lockTarget is .gsd/parallel/<MID>/ (#2184).
+    mkdirSync(lockTarget, { recursive: true });
 
-    const release = lockfile.lockSync(gsdDir, {
+    const release = lockfile.lockSync(lockTarget, {
       realpath: false,
       stale: 1_800_000, // 30 minutes — safe for laptop sleep / long event loop stalls
       update: 10_000,   // Update lock mtime every 10s to prove liveness
@ -283,7 +306,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult {
 
     // Safety net: clean up lock dir on process exit if _releaseFunction
     // wasn't called (e.g., normal exit after clean completion) (#1245).
-    ensureExitHandler(gsdDir);
+    ensureExitHandler(lockTarget);
 
     // Write the informational lock data
     atomicWriteSync(lp, JSON.stringify(lockData, null, 2));
@ -298,12 +321,12 @@ export function acquireSessionLock(basePath: string): SessionLockResult {
     // If no lock file or no alive process, try to clean up and re-acquire (#1245)
     if (!existingData || (existingPid && !isPidAlive(existingPid))) {
       try {
-        const lockDir = join(gsdDir + ".lock");
+        const lockDir = join(lockTarget + ".lock");
         if (existsSync(lockDir)) rmSync(lockDir, { recursive: true, force: true });
         if (existsSync(lp)) unlinkSync(lp);
 
         // Retry acquisition after cleanup
-        const release = lockfile.lockSync(gsdDir, {
+        const release = lockfile.lockSync(lockTarget, {
           realpath: false,
           stale: 1_800_000, // 30 minutes — match primary lock settings
           update: 10_000,
@ -312,7 +335,7 @@ export function acquireSessionLock(basePath: string): SessionLockResult {
         assignLockState(basePath, release, lp);
 
         // Safety net — uses centralized handler to avoid double-registration
-        ensureExitHandler(gsdDir);
+        ensureExitHandler(lockTarget);
 
         atomicWriteSync(lp, JSON.stringify(lockData, null, 2));
         return { acquired: true };
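Since several hunks above lean on proper-lockfile's directory locking, here is a minimal standalone sketch (illustrative, not part of this diff) of the same `lockSync` options the code passes:

```ts
import lockfile from "proper-lockfile";

// Hypothetical target path; the diff locks the per-session lockTarget directory instead.
const release = lockfile.lockSync("/tmp/demo-lock-target", {
  realpath: false,
  stale: 1_800_000, // consider the lock stale after 30 minutes without mtime updates
  update: 10_000,   // touch the lock every 10s to prove the holder is still alive
});

try {
  // ...critical section guarded by the OS-level lock...
} finally {
  release(); // always release so the .lock directory is removed
}
```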
@ -483,13 +506,24 @@ export function releaseSessionLock(basePath: string): void {
     // Non-fatal
   }
 
-  // Remove the proper-lockfile directory (.gsd.lock/) for the current path
+  // Remove the proper-lockfile directory for the current lock target.
+  // In parallel worker mode, this is .gsd/parallel/<MID>.lock/ (#2184).
+  const gsdDir = gsdRoot(basePath);
+  const lockTarget = effectiveLockTarget(gsdDir);
   try {
-    const lockDir = join(gsdRoot(basePath) + ".lock");
+    const lockDir = join(lockTarget + ".lock");
     if (existsSync(lockDir)) rmSync(lockDir, { recursive: true, force: true });
   } catch {
     // Non-fatal
   }
+  // Also clean the per-milestone parallel directory itself if it exists
+  if (lockTarget !== gsdDir) {
+    try {
+      if (existsSync(lockTarget)) rmSync(lockTarget, { recursive: true, force: true });
+    } catch {
+      // Non-fatal
+    }
+  }
 
   // Clean ALL registered lock paths (#1578) — lock files accumulate across
   // main project .gsd/, worktree .gsd/, and projects registry paths.
@ -13,7 +13,7 @@
  * research identified as critical for skill quality.
  */
 
-import { existsSync, readFileSync, readdirSync } from "node:fs";
+import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
 import { join } from "node:path";
 import { homedir } from "node:os";
 import type { UnitMetrics, MetricsLedger } from "./metrics.js";

@ -210,7 +210,7 @@ export function formatSkillDetail(basePath: string, skillName: string): string {
   // Check for SKILL.md existence
   const skillPath = join(homedir(), ".agents", "skills", skillName, "SKILL.md");
   if (existsSync(skillPath)) {
-    const stat = require("node:fs").statSync(skillPath);
+    const stat = statSync(skillPath);
     lines.push("");
     lines.push(`SKILL.md: ${skillPath}`);
     lines.push(`Last modified: ${stat.mtime.toISOString().slice(0, 10)}`);
src/resources/extensions/gsd/tests/auto-model-selection.test.ts (new file, 139 lines)
@ -0,0 +1,139 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
import { resolvePreferredModelConfig } from "../auto-model-selection.js";
|
||||
|
||||
function makeTempDir(prefix: string): string {
|
||||
return mkdtempSync(join(tmpdir(), prefix));
|
||||
}
|
||||
|
||||
test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models section is absent", () => {
|
||||
const originalCwd = process.cwd();
|
||||
const originalGsdHome = process.env.GSD_HOME;
|
||||
const tempProject = makeTempDir("gsd-routing-project-");
|
||||
const tempGsdHome = makeTempDir("gsd-routing-home-");
|
||||
|
||||
try {
|
||||
mkdirSync(join(tempProject, ".gsd"), { recursive: true });
|
||||
writeFileSync(
|
||||
join(tempProject, ".gsd", "PREFERENCES.md"),
|
||||
[
|
||||
"---",
|
||||
"dynamic_routing:",
|
||||
" enabled: true",
|
||||
" tier_models:",
|
||||
" light: claude-haiku-4-5",
|
||||
" standard: claude-sonnet-4-6",
|
||||
" heavy: claude-opus-4-6",
|
||||
"---",
|
||||
].join("\n"),
|
||||
"utf-8",
|
||||
);
|
||||
process.env.GSD_HOME = tempGsdHome;
|
||||
process.chdir(tempProject);
|
||||
|
||||
const config = resolvePreferredModelConfig("plan-slice", {
|
||||
provider: "anthropic",
|
||||
id: "claude-sonnet-4-6",
|
||||
});
|
||||
|
||||
assert.deepEqual(config, {
|
||||
primary: "claude-opus-4-6",
|
||||
fallbacks: [],
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
if (originalGsdHome === undefined) delete process.env.GSD_HOME;
|
||||
else process.env.GSD_HOME = originalGsdHome;
|
||||
rmSync(tempProject, { recursive: true, force: true });
|
||||
rmSync(tempGsdHome, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("resolvePreferredModelConfig falls back to auto start model when heavy tier is absent", () => {
|
||||
const originalCwd = process.cwd();
|
||||
const originalGsdHome = process.env.GSD_HOME;
|
||||
const tempProject = makeTempDir("gsd-routing-project-");
|
||||
const tempGsdHome = makeTempDir("gsd-routing-home-");
|
||||
|
||||
try {
|
||||
mkdirSync(join(tempProject, ".gsd"), { recursive: true });
|
||||
writeFileSync(
|
||||
join(tempProject, ".gsd", "PREFERENCES.md"),
|
||||
[
|
||||
"---",
|
||||
"dynamic_routing:",
|
||||
" enabled: true",
|
||||
" tier_models:",
|
||||
" light: claude-haiku-4-5",
|
||||
" standard: claude-sonnet-4-6",
|
||||
"---",
|
||||
].join("\n"),
|
||||
"utf-8",
|
||||
);
|
||||
process.env.GSD_HOME = tempGsdHome;
|
||||
process.chdir(tempProject);
|
||||
|
||||
const config = resolvePreferredModelConfig("execute-task", {
|
||||
provider: "openai",
|
||||
id: "gpt-5.4",
|
||||
});
|
||||
|
||||
assert.deepEqual(config, {
|
||||
primary: "openai/gpt-5.4",
|
||||
fallbacks: [],
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
if (originalGsdHome === undefined) delete process.env.GSD_HOME;
|
||||
else process.env.GSD_HOME = originalGsdHome;
|
||||
rmSync(tempProject, { recursive: true, force: true });
|
||||
rmSync(tempGsdHome, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", () => {
|
||||
const originalCwd = process.cwd();
|
||||
const originalGsdHome = process.env.GSD_HOME;
|
||||
const tempProject = makeTempDir("gsd-routing-project-");
|
||||
const tempGsdHome = makeTempDir("gsd-routing-home-");
|
||||
|
||||
try {
|
||||
mkdirSync(join(tempProject, ".gsd"), { recursive: true });
|
||||
writeFileSync(
|
||||
join(tempProject, ".gsd", "PREFERENCES.md"),
|
||||
[
|
||||
"---",
|
||||
"models:",
|
||||
" planning: claude-sonnet-4-6",
|
||||
"dynamic_routing:",
|
||||
" enabled: true",
|
||||
" tier_models:",
|
||||
" heavy: claude-opus-4-6",
|
||||
"---",
|
||||
].join("\n"),
|
||||
"utf-8",
|
||||
);
|
||||
process.env.GSD_HOME = tempGsdHome;
|
||||
process.chdir(tempProject);
|
||||
|
||||
const config = resolvePreferredModelConfig("plan-slice", {
|
||||
provider: "anthropic",
|
||||
id: "claude-opus-4-6",
|
||||
});
|
||||
|
||||
assert.deepEqual(config, {
|
||||
primary: "claude-sonnet-4-6",
|
||||
fallbacks: [],
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
if (originalGsdHome === undefined) delete process.env.GSD_HOME;
|
||||
else process.env.GSD_HOME = originalGsdHome;
|
||||
rmSync(tempProject, { recursive: true, force: true });
|
||||
rmSync(tempGsdHome, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
const sourcePath = join(import.meta.dirname, "..", "auto-start.ts");
|
||||
const source = readFileSync(sourcePath, "utf-8");
|
||||
|
||||
test("bootstrapAutoSession snapshots ctx.model before guided-flow entry (#2829)", () => {
|
||||
const snapshotIdx = source.indexOf("const startModelSnapshot = ctx.model");
|
||||
assert.ok(snapshotIdx > -1, "auto-start.ts should snapshot ctx.model at bootstrap start");
|
||||
|
||||
const firstDiscussIdx = source.indexOf('await showSmartEntry(ctx, pi, base, { step: requestedStepMode });');
|
||||
assert.ok(firstDiscussIdx > -1, "auto-start.ts should route through showSmartEntry during guided flow");
|
||||
|
||||
assert.ok(
|
||||
snapshotIdx < firstDiscussIdx,
|
||||
"auto-start.ts must capture the start model before guided-flow can mutate ctx.model",
|
||||
);
|
||||
});
|
||||
|
||||
test("bootstrapAutoSession restores autoModeStartModel from the early snapshot (#2829)", () => {
|
||||
const assignmentIdx = source.indexOf("s.autoModeStartModel = {");
|
||||
assert.ok(assignmentIdx > -1, "auto-start.ts should assign autoModeStartModel");
|
||||
|
||||
const snapshotRefIdx = source.indexOf("provider: startModelSnapshot.provider", assignmentIdx);
|
||||
assert.ok(snapshotRefIdx > -1, "autoModeStartModel should be restored from startModelSnapshot");
|
||||
});
|
||||
|
|
@ -227,6 +227,45 @@ test("collectSecretsFromManifest: manifest statuses are updated after collection
|
|||
"KEY_TO_SKIP should have status 'skipped' after user skipped it");
|
||||
});
|
||||
|
||||
test("collectSecretsFromManifest: applied keys hydrate process.env for the running session", async (t) => {
|
||||
const { collectSecretsFromManifest } = await loadOrchestrator();
|
||||
|
||||
const tmp = makeTempDir("manifest-live-env");
|
||||
const envKey = "CONTEXT7_API_KEY";
|
||||
const saved = process.env[envKey];
|
||||
t.after(() => {
|
||||
if (saved === undefined) delete process.env[envKey];
|
||||
else process.env[envKey] = saved;
|
||||
rmSync(tmp, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
delete process.env[envKey];
|
||||
|
||||
const manifest = makeManifest([
|
||||
{ key: envKey, status: "pending" },
|
||||
]);
|
||||
await writeManifestFile(tmp, manifest);
|
||||
|
||||
let callIndex = 0;
|
||||
const mockCtx = {
|
||||
cwd: tmp,
|
||||
hasUI: true,
|
||||
ui: {
|
||||
custom: async (_factory: any) => {
|
||||
callIndex++;
|
||||
if (callIndex <= 1) return null; // summary screen dismiss
|
||||
return "c7_live_test_key";
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = await collectSecretsFromManifest(tmp, "M001", mockCtx as any);
|
||||
|
||||
assert.ok(result.applied.includes(envKey), "CONTEXT7_API_KEY should be applied");
|
||||
assert.equal(process.env[envKey], "c7_live_test_key",
|
||||
"applied keys should be available through process.env without restarting");
|
||||
});
|
||||
|
||||
// ─── showSecretsSummary: render output ────────────────────────────────────────
|
||||
|
||||
test("showSecretsSummary: produces lines with correct status glyphs for each entry status", async () => {
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ import {
|
|||
isInAutoWorktree,
|
||||
getAutoWorktreeOriginalBase,
|
||||
mergeMilestoneToMain,
|
||||
} from "../auto-worktree.ts";
|
||||
} from "../../auto-worktree.ts";
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
|
|
@ -78,9 +78,9 @@ function createMilestoneArtifacts(dir: string, mid: string): void {
|
|||
// ─── Source-level: verify the merge code exists in the "all complete" path ────
|
||||
|
||||
test("auto-loop 'all milestones complete' path merges before stopping (#962)", () => {
|
||||
const loopSrc = readFileSync(join(__dirname, "..", "auto", "phases.ts"), "utf-8");
|
||||
const loopSrc = readFileSync(join(__dirname, "../..", "auto", "phases.ts"), "utf-8");
|
||||
const resolverSrc = readFileSync(
|
||||
join(__dirname, "..", "worktree-resolver.ts"),
|
||||
join(__dirname, "../..", "worktree-resolver.ts"),
|
||||
"utf-8",
|
||||
);
|
||||
|
||||
|
|
@ -9,7 +9,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
|
||||
function makeTmp(name: string): string {
|
||||
const dir = join(tmpdir(), `atomic-closeout-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
|
|
@ -4,7 +4,7 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
|
|||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
import { runGSDDoctor, selectDoctorScope, filterDoctorIssues } from "../doctor.js";
|
||||
import { runGSDDoctor, selectDoctorScope, filterDoctorIssues } from "../../doctor.js";
|
||||
|
||||
test("auto-preflight scopes to active milestone, ignoring historical", async (t) => {
|
||||
const tmpBase = mkdtempSync(join(tmpdir(), "gsd-auto-preflight-test-"));
|
||||
|
|
@ -11,19 +11,19 @@ import {
|
|||
diagnoseExpectedArtifact,
|
||||
buildLoopRemediationSteps,
|
||||
hasImplementationArtifacts,
|
||||
} from "../auto-recovery.ts";
|
||||
import { parseRoadmap, parsePlan } from "../parsers-legacy.ts";
|
||||
import { parseTaskPlanFile, clearParseCache } from "../files.ts";
|
||||
import { invalidateAllCaches } from "../cache.ts";
|
||||
import { deriveState, invalidateStateCache } from "../state.ts";
|
||||
} from "../../auto-recovery.ts";
|
||||
import { parseRoadmap, parsePlan } from "../../parsers-legacy.ts";
|
||||
import { parseTaskPlanFile, clearParseCache } from "../../files.ts";
|
||||
import { invalidateAllCaches } from "../../cache.ts";
|
||||
import { deriveState, invalidateStateCache } from "../../state.ts";
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
} from "../gsd-db.ts";
|
||||
import { renderPlanFromDb } from "../markdown-renderer.ts";
|
||||
} from "../../gsd-db.ts";
|
||||
import { renderPlanFromDb } from "../../markdown-renderer.ts";
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = join(tmpdir(), `gsd-test-${randomUUID()}`);
|
||||
|
|
@ -16,8 +16,8 @@ import assert from 'node:assert/strict';
|
|||
import { mkdirSync, writeFileSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { getManifestStatus } from '../files.ts';
|
||||
import { collectSecretsFromManifest } from '../../get-secrets-from-user.ts';
|
||||
import { getManifestStatus } from '../../files.ts';
|
||||
import { collectSecretsFromManifest } from '../../../get-secrets-from-user.ts';
|
||||
|
||||
function makeTempDir(prefix: string): string {
|
||||
const dir = join(tmpdir(), `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
|
|
@ -12,8 +12,8 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
|
||||
import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts";
|
||||
import { nativeMergeSquash } from "../native-git-bridge.ts";
|
||||
import { createAutoWorktree, mergeMilestoneToMain } from "../../auto-worktree.ts";
|
||||
import { nativeMergeSquash } from "../../native-git-bridge.ts";
|
||||
|
||||
function run(cmd: string, cwd: string): string {
|
||||
return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
|
||||
|
|
@ -88,7 +88,7 @@ test("#2151 bug 1: auto-stash unblocks merge when unrelated files are dirty", ()
|
|||
});
|
||||
|
||||
test("#2151 bug 2: nativeMergeSquash returns dirty filenames", async () => {
|
||||
const { nativeMergeSquash } = await import("../native-git-bridge.ts");
|
||||
const { nativeMergeSquash } = await import("../../native-git-bridge.ts");
|
||||
const repo = createTempRepo();
|
||||
try {
|
||||
run("git checkout -b milestone/M210", repo);
|
||||
|
|
@ -21,9 +21,9 @@ import {
|
|||
createAutoWorktree,
|
||||
mergeMilestoneToMain,
|
||||
getAutoWorktreeOriginalBase,
|
||||
} from "../auto-worktree.ts";
|
||||
import { getSliceBranchName } from "../worktree.ts";
|
||||
import { nativeMergeSquash } from "../native-git-bridge.ts";
|
||||
} from "../../auto-worktree.ts";
|
||||
import { getSliceBranchName } from "../../worktree.ts";
|
||||
import { nativeMergeSquash } from "../../native-git-bridge.ts";
|
||||
|
||||
function run(cmd: string, cwd: string): string {
|
||||
// Safe: all inputs are hardcoded test strings, not user input
|
||||
|
|
@ -329,7 +329,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => {
|
|||
});
|
||||
|
||||
test("#1738 bug 1: nativeMergeSquash detects dirty working tree", async () => {
|
||||
const { nativeMergeSquash } = await import("../native-git-bridge.ts");
|
||||
const { nativeMergeSquash } = await import("../../native-git-bridge.ts");
|
||||
const repo = freshRepo();
|
||||
|
||||
run("git checkout -b milestone/M070", repo);
|
||||
|
|
@ -20,7 +20,7 @@ import {
|
|||
enterAutoWorktree,
|
||||
getAutoWorktreeOriginalBase,
|
||||
getActiveAutoWorktreeContext,
|
||||
} from "../auto-worktree.ts";
|
||||
} from "../../auto-worktree.ts";
|
||||
|
||||
// Note: execSync is used intentionally in tests for git operations with
|
||||
// controlled, hardcoded inputs (no user input). This is safe and matches
|
||||
|
|
@ -150,7 +150,7 @@ describe("auto-worktree lifecycle", () => {
|
|||
run("git commit -m \"add milestone\"", tempDir);
|
||||
|
||||
// Import createWorktree directly for manual worktree
|
||||
const { createWorktree } = await import("../worktree-manager.ts");
|
||||
const { createWorktree } = await import("../../worktree-manager.ts");
|
||||
|
||||
// Create manual worktree (uses worktree/<name> branch)
|
||||
const manualWt = createWorktree(tempDir, "feature-x");
|
||||
|
|
@ -164,7 +164,7 @@ describe("auto-worktree lifecycle", () => {
|
|||
|
||||
// Cleanup both
|
||||
teardownAutoWorktree(tempDir, "M003");
|
||||
const { removeWorktree } = await import("../worktree-manager.ts");
|
||||
const { removeWorktree } = await import("../../worktree-manager.ts");
|
||||
removeWorktree(tempDir, "feature-x");
|
||||
});
|
||||
|
||||
|
|
@ -190,7 +190,7 @@ describe("auto-worktree lifecycle", () => {
|
|||
run("git add .", tempDir);
|
||||
run("git commit -m \"add milestone\"", tempDir);
|
||||
|
||||
const { GitServiceImpl } = await import("../git-service.ts");
|
||||
const { GitServiceImpl } = await import("../../git-service.ts");
|
||||
|
||||
// Create worktree
|
||||
const wtPath = createAutoWorktree(tempDir, "M005");
|
||||
|
|
@ -215,7 +215,7 @@ describe("auto-worktree lifecycle", () => {
|
|||
run("git commit -m \"add milestone\"", tempDir);
|
||||
|
||||
// Simulate a crash leaving a stale directory with no .git file.
|
||||
const { worktreePath } = await import("../worktree-manager.ts");
|
||||
const { worktreePath } = await import("../../worktree-manager.ts");
|
||||
const staleDir = worktreePath(tempDir, "M010");
|
||||
mkdirSync(staleDir, { recursive: true });
|
||||
writeFileSync(join(staleDir, "orphan.txt"), "stale leftover\n");
|
||||
|
|
@ -12,7 +12,7 @@
|
|||
import { describe, it } from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import { computeBudgets } from "../context-budget.js";
|
||||
import { computeBudgets } from "../../context-budget.js";
|
||||
|
||||
// ─── Pure threshold / pipeline tests ──────────────────────────────────────────
|
||||
// These test the budget engine outputs that the continue-here monitor relies on.
|
||||
|
|
@ -164,7 +164,7 @@ describe("continue-here", () => {
|
|||
describe("continueHereFired runtime record field", () => {
|
||||
it("AutoUnitRuntimeRecord includes continueHereFired with default false", async (t) => {
|
||||
// Import writeUnitRuntimeRecord to verify the field is present and defaults
|
||||
const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../unit-runtime.js");
|
||||
const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../../unit-runtime.js");
|
||||
const fs = await import("node:fs");
|
||||
const path = await import("node:path");
|
||||
const os = await import("node:os");
|
||||
|
|
@ -202,7 +202,7 @@ describe("continue-here", () => {
|
|||
|
||||
describe("context-pressure monitor integration", () => {
|
||||
it("should fire wrap-up when context >= threshold and mark continueHereFired", async (t) => {
|
||||
const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../unit-runtime.js");
|
||||
const { writeUnitRuntimeRecord, readUnitRuntimeRecord, clearUnitRuntimeRecord } = await import("../../unit-runtime.js");
|
||||
const fs = await import("node:fs");
|
||||
const path = await import("node:path");
|
||||
const os = await import("node:os");
|
||||
|
|
@ -10,7 +10,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
|
||||
function makeTmp(name: string): string {
|
||||
const dir = join(tmpdir(), `doctor-deferral-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
|
|
@ -10,7 +10,7 @@ import assert from "node:assert/strict";
|
|||
import { mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { runGSDDoctor } from "../doctor.js";
|
||||
import { runGSDDoctor } from "../../doctor.js";
|
||||
|
||||
test("doctor fix=true sanitizes em-dash in milestone title", async (t) => {
|
||||
const tmpBase = mkdtempSync(join(tmpdir(), "gsd-doctor-delim-"));
|
||||
|
|
@ -4,8 +4,8 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, existsSync } from "node:
|
|||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
import { runGSDDoctor } from "../doctor.js";
|
||||
import { formatDoctorReportJson } from "../doctor-format.js";
|
||||
import { runGSDDoctor } from "../../doctor.js";
|
||||
import { formatDoctorReportJson } from "../../doctor-format.js";
|
||||
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function makeBase(): { base: string; gsd: string; mDir: string } {
|
||||
|
|
@ -230,7 +230,7 @@ describe('doctor-enhancements', async () => {
|
|||
const historyPath = join(gsd, "doctor-history.jsonl");
|
||||
assert.ok(existsSync(historyPath), "doctor-history.jsonl is created after run");
|
||||
|
||||
const { readDoctorHistory } = await import("../doctor.js");
|
||||
const { readDoctorHistory } = await import("../../doctor.js");
|
||||
const history = await readDoctorHistory(base);
|
||||
assert.ok(history.length >= 1, "history has at least one entry");
|
||||
assert.ok(typeof history[0]?.ts === "string", "history entry has ts field");
|
||||
|
|
@ -20,7 +20,7 @@ import {
|
|||
runEnvironmentChecks,
|
||||
environmentResultsToDoctorIssues,
|
||||
checkEnvironmentHealth,
|
||||
} from "../doctor-environment.ts";
|
||||
} from "../../doctor-environment.ts";
|
||||
/** Create a directory tree with files. */
|
||||
function createDir(files: Record<string, string> = {}): string {
|
||||
const dir = mkdtempSync(join(tmpdir(), "gsd-wt-env-"));
|
||||
|
|
@ -26,7 +26,7 @@ import {
|
|||
formatEnvironmentReport,
|
||||
checkEnvironmentHealth,
|
||||
type EnvironmentCheckResult,
|
||||
} from "../doctor-environment.ts";
|
||||
} from "../../doctor-environment.ts";
|
||||
function createProjectDir(files: Record<string, string> = {}): string {
|
||||
const dir = mkdtempSync(join(tmpdir(), "gsd-env-test-"));
|
||||
for (const [name, content] of Object.entries(files)) {
|
||||
|
|
@ -14,8 +14,8 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { closeDatabase } from "../gsd-db.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
import { closeDatabase } from "../../gsd-db.ts";
|
||||
|
||||
function makeTmp(name: string): string {
|
||||
const dir = join(tmpdir(), `doctor-fixlevel-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
|
|
@ -15,7 +15,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
function run(cmd: string, cwd: string): string {
|
||||
return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
|
||||
}
|
||||
|
|
@ -23,7 +23,7 @@ import {
|
|||
checkHealEscalation,
|
||||
resetProactiveHealing,
|
||||
formatHealthSummary,
|
||||
} from "../doctor-proactive.ts";
|
||||
} from "../../doctor-proactive.ts";
|
||||
function run(cmd: string, cwd: string): string {
|
||||
return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
|
||||
}
|
||||
|
|
@ -12,7 +12,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
|
||||
function makeTmp(name: string): string {
|
||||
const dir = join(tmpdir(), `doctor-roadmap-summary-${name}-${Date.now()}-${Math.random().toString(36).slice(2)}`);
|
||||
|
|
@ -14,7 +14,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
import { execSync } from "node:child_process";
|
||||
|
||||
import { runGSDDoctor } from "../doctor.ts";
|
||||
import { runGSDDoctor } from "../../doctor.ts";
|
||||
function run(cmd: string, cwd: string): string {
|
||||
return execSync(cmd, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
|
||||
}
|
||||
|
|
@ -4,7 +4,7 @@ import { mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync, existsSync
|
|||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
import { formatDoctorReport, runGSDDoctor, summarizeDoctorIssues, filterDoctorIssues, selectDoctorScope, validateTitle } from "../doctor.js";
|
||||
import { formatDoctorReport, runGSDDoctor, summarizeDoctorIssues, filterDoctorIssues, selectDoctorScope, validateTitle } from "../../doctor.js";
|
||||
const tmpBase = mkdtempSync(join(tmpdir(), "gsd-doctor-test-"));
|
||||
const gsd = join(tmpBase, ".gsd");
|
||||
const mDir = join(gsd, "milestones", "M001");
|
||||
|
|
@@ -34,11 +34,11 @@ import { join } from "node:path";
import { tmpdir } from "node:os";
import { stringify, parse } from "yaml";

import { CustomWorkflowEngine } from "../custom-workflow-engine.ts";
import { CustomExecutionPolicy } from "../custom-execution-policy.ts";
import { createRun, listRuns } from "../run-manager.ts";
import { readGraph, writeGraph } from "../graph.ts";
import { validateDefinition } from "../definition-loader.ts";
import { CustomWorkflowEngine } from "../../custom-workflow-engine.ts";
import { CustomExecutionPolicy } from "../../custom-execution-policy.ts";
import { createRun, listRuns } from "../../run-manager.ts";
import { readGraph, writeGraph } from "../../graph.ts";
import { validateDefinition } from "../../definition-loader.ts";

// ─── Helpers ─────────────────────────────────────────────────────────────

@@ -26,10 +26,10 @@ import {
  createAutoWorktree,
  mergeMilestoneToMain,
  autoWorktreeBranch,
} from "../auto-worktree.ts";
import { captureIntegrationBranch, getSliceBranchName } from "../worktree.ts";
import { writeIntegrationBranch, readIntegrationBranch } from "../git-service.ts";
import { nextMilestoneId, generateMilestoneSuffix } from "../guided-flow.ts";
} from "../../auto-worktree.ts";
import { captureIntegrationBranch, getSliceBranchName } from "../../worktree.ts";
import { writeIntegrationBranch, readIntegrationBranch } from "../../git-service.ts";
import { nextMilestoneId, generateMilestoneSuffix } from "../../guided-flow.ts";

// ─── Helpers ────────────────────────────────────────────────────────────────

@@ -12,9 +12,9 @@ import { join } from "node:path";
import { tmpdir } from "node:os";
import { execFileSync } from "node:child_process";

import { GIT_NO_PROMPT_ENV } from "../git-constants.ts";
import { nativeAddAllWithExclusions } from "../native-git-bridge.ts";
import { RUNTIME_EXCLUSION_PATHS } from "../git-service.ts";
import { GIT_NO_PROMPT_ENV } from "../../git-constants.ts";
import { nativeAddAllWithExclusions } from "../../native-git-bridge.ts";
import { RUNTIME_EXCLUSION_PATHS } from "../../git-service.ts";
function git(cwd: string, ...args: string[]): string {
  return execFileSync("git", args, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
}
@@ -101,7 +101,7 @@ describe('git-locale', async () => {
  // We verify indirectly: the source code must pass env: GIT_NO_PROMPT_ENV.
  // Read the source and check for the pattern. This is a static check.
  const src = readFileSync(
    join(import.meta.dirname, "..", "native-git-bridge.ts"),
    join(import.meta.dirname, "../..", "native-git-bridge.ts"),
    "utf-8"
  );

@@ -14,7 +14,7 @@ import assert from "node:assert/strict";
import {
  abortAndReset,
  formatGitError,
} from "../git-self-heal.js";
} from "../../git-self-heal.js";

// ─── Helpers ─────────────────────────────────────────────────────

@@ -20,8 +20,8 @@ import {
  type CommitOptions,
  type PreMergeCheckResult,
  type TaskCommitContext,
} from "../git-service.ts";
import { nativeAddAllWithExclusions } from "../native-git-bridge.ts";
} from "../../git-service.ts";
import { nativeAddAllWithExclusions } from "../../native-git-bridge.ts";
function run(command: string, cwd: string): string {
  return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
}
@@ -1113,7 +1113,7 @@ describe('git-service', async () => {
  // ─── untrackRuntimeFiles: removes tracked runtime files from index ───

  test('untrackRuntimeFiles', async () => {
    const { untrackRuntimeFiles } = await import("../gitignore.ts");
    const { untrackRuntimeFiles } = await import("../../gitignore.ts");
    const repo = mkdtempSync(join(tmpdir(), "gsd-untrack-"));
    runGit(repo, ["init", "-b", "main"]);
    runGit(repo, ["config", "user.email", "test@test.com"]);

@@ -1222,7 +1222,7 @@ describe('git-service', async () => {
  // ─── ensureGitignore: always adds .gsd to gitignore ──────────────────

  test('ensureGitignore: adds .gsd entry', async () => {
    const { ensureGitignore } = await import("../gitignore.ts");
    const { ensureGitignore } = await import("../../gitignore.ts");
    const repo = mkdtempSync(join(tmpdir(), "gsd-gitignore-external-state-"));

    // Should add .gsd to gitignore (external state dir is a symlink)

@@ -22,8 +22,8 @@ import {
import { join } from "node:path";
import { tmpdir } from "node:os";

import { ensureGitignore, hasGitTrackedGsdFiles } from "../gitignore.ts";
import { migrateToExternalState } from "../migrate-external.ts";
import { ensureGitignore, hasGitTrackedGsdFiles } from "../../gitignore.ts";
import { migrateToExternalState } from "../../migrate-external.ts";

// ─── Helpers ─────────────────────────────────────────────────────
@@ -7,7 +7,7 @@ import {
  writeBlockerPlaceholder,
  verifyExpectedArtifact,
  buildLoopRemediationSteps,
} from "../auto-recovery.ts";
} from "../../auto-recovery.ts";
import { describe, test, beforeEach, afterEach } from 'node:test';
import assert from 'node:assert/strict';

@@ -299,7 +299,7 @@ test('writeBlockerPlaceholder: updates DB task status for execute-task (#2531)',
  const base = createFixtureBase();
  try {
    const { openDatabase, closeDatabase, insertMilestone, insertSlice, insertTask, getTask, isDbAvailable } =
      await import("../gsd-db.ts");
      await import("../../gsd-db.ts");

    const dbPath = join(base, ".gsd", "gsd.db");
    // Create the tasks directory (required for artifact path resolution)

@@ -334,7 +334,7 @@ test('writeBlockerPlaceholder: does NOT update DB for non-execute-task types', a
  const base = createFixtureBase();
  try {
    const { openDatabase, closeDatabase, insertMilestone, insertSlice, getSlice, isDbAvailable } =
      await import("../gsd-db.ts");
      await import("../../gsd-db.ts");

    const dbPath = join(base, ".gsd", "gsd.db");
    mkdirSync(join(base, ".gsd", "milestones", "M001", "slices", "S01"), { recursive: true });

@@ -24,7 +24,7 @@ import { join } from "node:path";
import { tmpdir } from "node:os";
import { execFileSync } from "node:child_process";

import { isInheritedRepo } from "../repo-identity.ts";
import { isInheritedRepo } from "../../repo-identity.ts";

function run(cmd: string, args: string[], cwd: string): string {
  return execFileSync(cmd, args, {
@@ -12,15 +12,15 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync, appendFile
import { join } from 'node:path';
import { tmpdir } from 'node:os';

import { openDatabase, closeDatabase, isDbAvailable, _getAdapter } from '../gsd-db.ts';
import { migrateFromMarkdown, parseDecisionsTable } from '../md-importer.ts';
import { openDatabase, closeDatabase, isDbAvailable, _getAdapter } from '../../gsd-db.ts';
import { migrateFromMarkdown, parseDecisionsTable } from '../../md-importer.ts';
import {
  queryDecisions,
  queryRequirements,
  formatDecisionsForPrompt,
  formatRequirementsForPrompt,
} from '../context-store.ts';
import { saveDecisionToDb, generateDecisionsMd } from '../db-writer.ts';
} from '../../context-store.ts';
import { saveDecisionToDb, generateDecisionsMd } from '../../db-writer.ts';
import { describe, test, beforeEach, afterEach } from 'node:test';
import assert from 'node:assert/strict';

@@ -11,15 +11,15 @@ import { execSync } from 'node:child_process';
import { join } from 'node:path';
import { tmpdir } from 'node:os';

import { deriveState } from '../state.ts';
import { indexWorkspace } from '../workspace-index.ts';
import { inlinePriorMilestoneSummary } from '../files.ts';
import { getPriorSliceCompletionBlocker } from '../dispatch-guard.ts';
import { deriveState } from '../../state.ts';
import { indexWorkspace } from '../../workspace-index.ts';
import { inlinePriorMilestoneSummary } from '../../files.ts';
import { getPriorSliceCompletionBlocker } from '../../dispatch-guard.ts';
import {
  getSliceBranchName,
  parseSliceBranch,
} from '../worktree.ts';
import { clearPathCache } from '../paths.ts';
} from '../../worktree.ts';
import { clearPathCache } from '../../paths.ts';
import { describe, test, beforeEach, afterEach } from 'node:test';
import assert from 'node:assert/strict';
@@ -50,11 +50,11 @@ import {
  transaction,
  isDbAvailable,
  _getAdapter,
} from "../gsd-db.ts";
} from "../../gsd-db.ts";

// ── Tool handlers ─────────────────────────────────────────────────────────
import { handleCompleteTask } from "../tools/complete-task.ts";
import { handleCompleteSlice } from "../tools/complete-slice.ts";
import { handleCompleteTask } from "../../tools/complete-task.ts";
import { handleCompleteSlice } from "../../tools/complete-slice.ts";

// ── Markdown renderer ─────────────────────────────────────────────────────
import {

@@ -63,32 +63,32 @@ import {
  renderAllFromDb,
  detectStaleRenders,
  repairStaleRenders,
} from "../markdown-renderer.ts";
} from "../../markdown-renderer.ts";

// ── State derivation ──────────────────────────────────────────────────────
import {
  deriveStateFromDb,
  _deriveStateImpl,
  invalidateStateCache,
} from "../state.ts";
} from "../../state.ts";

// ── Auto-migration ───────────────────────────────────────────────────────
import {
  migrateHierarchyToDb,
  migrateFromMarkdown,
} from "../md-importer.ts";
} from "../../md-importer.ts";

// ── Post-unit diagnostics ─────────────────────────────────────────────────
import { detectRogueFileWrites } from "../auto-post-unit.ts";
import { detectRogueFileWrites } from "../../auto-post-unit.ts";

// ── Doctor ────────────────────────────────────────────────────────────────
import { runGSDDoctor } from "../doctor.ts";
import { runGSDDoctor } from "../../doctor.ts";

// ── Undo/reset ────────────────────────────────────────────────────────────
import { handleUndoTask, handleResetSlice } from "../undo.ts";
import { handleUndoTask, handleResetSlice } from "../../undo.ts";

// ── Cache invalidation ───────────────────────────────────────────────────
import { invalidateAllCaches } from "../cache.ts";
import { invalidateAllCaches } from "../../cache.ts";

// ═══════════════════════════════════════════════════════════════════════════
// Helpers
@@ -400,7 +400,7 @@ test("full lifecycle: migration through completion through doctor", async (t) =>
  writeFileSync(join(rogueDir, "T99-SUMMARY.md"), "# Rogue Summary\n", "utf-8");

  // Clear path cache so resolveTaskFile sees the newly written file
  const { clearPathCache } = await import("../paths.ts");
  const { clearPathCache } = await import("../../paths.ts");
  clearPathCache();

  const rogues = detectRogueFileWrites("execute-task", "M001/S01/T99", base);

@@ -458,7 +458,7 @@ test("recovery: DB loss → migrateFromMarkdown restores state, stale render det
  assert.equal(existsSync(dbPath), false, "DB file should be deleted");

  // Clear path caches so gsdRoot re-probes after DB deletion
  const { clearPathCache: clearPaths } = await import("../paths.ts");
  const { clearPathCache: clearPaths } = await import("../../paths.ts");
  clearPaths();
  invalidateAllCaches();

@@ -13,8 +13,8 @@ import {
  transformToGSD,
  generatePreview,
  writeGSDDirectory,
} from '../migrate/index.ts';
import { deriveState } from '../state.ts';
} from '../../migrate/index.ts';
import { deriveState } from '../../state.ts';
import { describe, test, beforeEach, afterEach } from 'node:test';
import assert from 'node:assert/strict';
@@ -24,7 +24,7 @@ import {
  isInAutoWorktree,
  getAutoWorktreeOriginalBase,
  mergeMilestoneToMain,
} from "../auto-worktree.ts";
} from "../../auto-worktree.ts";

const __dirname = dirname(fileURLToPath(import.meta.url));

@@ -124,7 +124,7 @@ test("worktree swap on milestone transition: merge old, create new", () => {

test("auto/phases.ts milestone transition block contains worktree lifecycle", () => {
  const phasesSrc = readFileSync(
    join(__dirname, "..", "auto", "phases.ts"),
    join(__dirname, "../..", "auto", "phases.ts"),
    "utf-8",
  );

@@ -147,7 +147,7 @@ test("auto/phases.ts milestone transition block contains worktree lifecycle", ()

test("worktree-resolver mergeAndExit preserves branch when roadmap is missing (#1573)", () => {
  const resolverSrc = readFileSync(
    join(__dirname, "..", "worktree-resolver.ts"),
    join(__dirname, "../..", "worktree-resolver.ts"),
    "utf-8",
  );
@@ -32,12 +32,12 @@ import {
  mergeAllCompleted,
  formatMergeResults,
  type MergeResult,
} from "../parallel-merge.ts";
import type { WorkerInfo } from "../parallel-orchestrator.ts";
} from "../../parallel-merge.ts";
import type { WorkerInfo } from "../../parallel-orchestrator.ts";
import {
  writeSessionStatus,
  readSessionStatus,
} from "../session-status-io.ts";
} from "../../session-status-io.ts";

// ─── Helpers ──────────────────────────────────────────────────────────────────

@@ -26,12 +26,12 @@ import {
  getWorkerBatches,
  hasActiveWorkers,
  resetWorkerRegistry,
} from '../../subagent/worker-registry.ts';
} from '../../../subagent/worker-registry.ts';
import {
  getBudgetAlertLevel,
  getNewBudgetAlertLevel,
  getBudgetEnforcementAction,
} from '../auto-budget.ts';
} from '../../auto-budget.ts';
import {
  type UnitMetrics,
  type MetricsLedger,

@@ -42,7 +42,7 @@ import {
  formatCostProjection,
  getAverageCostPerUnitType,
  predictRemainingCost,
} from '../metrics.ts';
} from '../../metrics.ts';

// ─── Fixture helpers ──────────────────────────────────────────────────────────
@@ -5,7 +5,7 @@ import { join } from "node:path";
import { tmpdir } from "node:os";
import { spawnSync } from "node:child_process";

import { gsdRoot, _clearGsdRootCache } from "../paths.ts";
import { gsdRoot, _clearGsdRootCache } from "../../paths.ts";
/** Create a tmp dir and resolve symlinks + 8.3 short names (macOS /var→/private/var, Windows RUNNER~1→runneradmin). */
function tmp(): string {
  const p = mkdtempSync(join(tmpdir(), "gsd-paths-test-"));

@@ -11,8 +11,8 @@

import { describe, it, before, after } from 'node:test';
import assert from 'node:assert';
import { PluginImporter, type DiscoveryResult, type ImportManifest } from '../plugin-importer.js';
import { getMarketplaceFixtures } from './marketplace-test-fixtures.js';
import { PluginImporter, type DiscoveryResult, type ImportManifest } from '../../plugin-importer.js';
import { getMarketplaceFixtures } from '../marketplace-test-fixtures.ts';

// ============================================================================
// Live Test Configuration

@@ -15,9 +15,9 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

import { buildExistingMilestonesContext } from "../guided-flow-queue.ts";
import type { GSDState, MilestoneRegistryEntry } from "../types.ts";
import { createTestContext } from "./test-helpers.ts";
import { buildExistingMilestonesContext } from "../../guided-flow-queue.ts";
import type { GSDState, MilestoneRegistryEntry } from "../../types.ts";
import { createTestContext } from "../test-helpers.ts";

const { assertTrue, assertEq, report } = createTestContext();
@@ -17,10 +17,10 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync, existsSync
import { join } from 'node:path';
import { tmpdir } from 'node:os';

import { deriveState, invalidateStateCache } from '../state.ts';
import { findMilestoneIds } from '../guided-flow.ts';
import { saveQueueOrder, loadQueueOrder } from '../queue-order.ts';
import { parseContextDependsOn } from '../files.ts';
import { deriveState, invalidateStateCache } from '../../state.ts';
import { findMilestoneIds } from '../../guided-flow.ts';
import { saveQueueOrder, loadQueueOrder } from '../../queue-order.ts';
import { parseContextDependsOn } from '../../files.ts';
// ─── Fixture Helpers ───────────────────────────────────────────────────────

function createFixtureBase(): string {

@@ -298,7 +298,7 @@ test('E2E: DB-backed path respects queue order (#2556)', async () => {
  // the dispatch guard (which respects queue order) blocked completion.
  const base = createFixtureBase();
  try {
    const { openDatabase, closeDatabase, insertMilestone, isDbAvailable } = await import('../gsd-db.ts');
    const { openDatabase, closeDatabase, insertMilestone, isDbAvailable } = await import('../../gsd-db.ts');
    const dbPath = join(base, '.gsd', 'gsd.db');

    // Create milestone directories (required for findMilestoneIds)

@@ -14,8 +14,8 @@ import { join } from "node:path";
import { tmpdir } from "node:os";
import { execSync } from "node:child_process";

import { captureIntegrationBranch, getCurrentBranch } from "../worktree.ts";
import { readIntegrationBranch, QUICK_BRANCH_RE } from "../git-service.ts";
import { captureIntegrationBranch, getCurrentBranch } from "../../worktree.ts";
import { readIntegrationBranch, QUICK_BRANCH_RE } from "../../git-service.ts";

function run(command: string, cwd: string): string {
  return execSync(command, { cwd, stdio: ["ignore", "pipe", "pipe"], encoding: "utf-8" }).trim();
@@ -139,7 +139,7 @@ test('cleanupQuickBranch: merges back and cleans up (same session)', async () =>
  // Import and call cleanupQuickBranch
  // Use dynamic import to get a fresh module scope — the in-memory state
  // won't be set, so it will fall through to disk recovery
  const { cleanupQuickBranch } = await import("../quick.ts");
  const { cleanupQuickBranch } = await import("../../quick.ts");
  const result = cleanupQuickBranch();

  assert.ok(result, "cleanupQuickBranch returns true");

@@ -187,7 +187,7 @@ test('cleanupQuickBranch: recovers from disk state (cross-session)', async () =>

  process.chdir(repo);

  const { cleanupQuickBranch } = await import("../quick.ts");
  const { cleanupQuickBranch } = await import("../../quick.ts");
  const result = cleanupQuickBranch();

  assert.ok(result, "cross-session recovery returns true");

@@ -207,7 +207,7 @@ test('cleanupQuickBranch: no-op without pending state', async () => {
  const origCwd = process.cwd();
  process.chdir(repo);

  const { cleanupQuickBranch } = await import("../quick.ts");
  const { cleanupQuickBranch } = await import("../../quick.ts");
  const result = cleanupQuickBranch();

  assert.ok(!result, "returns false when no pending state");

@@ -5,12 +5,12 @@ import { join, dirname } from 'node:path';
import { tmpdir } from 'node:os';
import { fileURLToPath } from 'node:url';

import { extractUatType } from '../files.ts';
import { resolveSliceFile } from '../paths.ts';
import { checkNeedsRunUat } from '../auto-prompts.ts';
import { extractUatType } from '../../files.ts';
import { resolveSliceFile } from '../../paths.ts';
import { checkNeedsRunUat } from '../../auto-prompts.ts';

const __dirname = dirname(fileURLToPath(import.meta.url));
const worktreePromptsDir = join(__dirname, '..', 'prompts');
const worktreePromptsDir = join(__dirname, '../..', 'prompts');

function loadPromptFromWorktree(name: string, vars: Record<string, string> = {}): string {
  const path = join(worktreePromptsDir, `${name}.md`);
@@ -10,14 +10,14 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync, readFileSync } from 'nod
import { join } from 'node:path';
import { tmpdir } from 'node:os';

import { openDatabase, closeDatabase } from '../gsd-db.ts';
import { migrateFromMarkdown } from '../md-importer.ts';
import { openDatabase, closeDatabase } from '../../gsd-db.ts';
import { migrateFromMarkdown } from '../../md-importer.ts';
import {
  queryDecisions,
  queryRequirements,
  formatDecisionsForPrompt,
  formatRequirementsForPrompt,
} from '../context-store.ts';
} from '../../context-store.ts';
import { test } from 'node:test';
import assert from 'node:assert/strict';

@@ -18,10 +18,10 @@ import { execSync } from "node:child_process";
import {
  createAutoWorktree,
  mergeMilestoneToMain,
} from "../auto-worktree.ts";
import { getSliceBranchName } from "../worktree.ts";
import { abortAndReset } from "../git-self-heal.ts";
import { runGSDDoctor } from "../doctor.ts";
} from "../../auto-worktree.ts";
import { getSliceBranchName } from "../../worktree.ts";
import { abortAndReset } from "../../git-self-heal.ts";
import { runGSDDoctor } from "../../doctor.ts";
import { describe, test } from 'node:test';
import assert from 'node:assert/strict';
@@ -260,6 +260,61 @@ test("runDispatch emits dispatch-stop when dispatch returns stop action", async
  assert.equal(stopEvents[0].flowId, ic.flowId);
});

test("runDispatch checks prior-slice completion against the project root in worktree mode", async () => {
  const capture = createEventCapture();
  const guardCalls: Array<{ fn: string; args: unknown[] }> = [];
  const deps = makeMockDeps(capture, {
    getMainBranch: (basePath: string) => {
      guardCalls.push({ fn: "getMainBranch", args: [basePath] });
      return "main";
    },
    getPriorSliceCompletionBlocker: (
      basePath: string,
      mainBranch: string,
      unitType: string,
      unitId: string,
    ) => {
      guardCalls.push({
        fn: "getPriorSliceCompletionBlocker",
        args: [basePath, mainBranch, unitType, unitId],
      });
      return null;
    },
  });
  const ic = makeIC(deps, {
    s: {
      ...makeSession(),
      basePath: "/tmp/project/.gsd/worktrees/M029-xoklo9",
      originalBasePath: "/tmp/project",
    } as any,
  });
  const preData: PreDispatchData = {
    state: {
      phase: "executing",
      activeMilestone: { id: "M029-xoklo9", title: "Test", status: "active" },
      activeSlice: { id: "S01", title: "Slice 1" },
      registry: [{ id: "M029-xoklo9", status: "active" }],
      blockers: [],
    } as any,
    mid: "M029-xoklo9",
    midTitle: "Test Milestone",
  };

  const result = await runDispatch(ic, preData, {
    recentUnits: [],
    stuckRecoveryAttempts: 0,
  });

  assert.equal(result.action, "next");
  assert.deepEqual(guardCalls, [
    { fn: "getMainBranch", args: ["/tmp/project"] },
    {
      fn: "getPriorSliceCompletionBlocker",
      args: ["/tmp/project", "main", "execute-task", "M001/S01/T01"],
    },
  ]);
});

test("runUnitPhase emits unit-start and unit-end with causedBy reference", async () => {
  const capture = createEventCapture();
@@ -0,0 +1,60 @@
import { describe, it } from "node:test";
import assert from "node:assert";

/**
 * Basic tests for the parallel monitor overlay data helpers.
 * The overlay is primarily a rendering component that reads existing
 * status files — these tests verify the helper logic in isolation.
 */

describe("parallel-monitor-overlay", () => {
  it("progressBar generates correct width", async () => {
    // Dynamic import to test the module loads cleanly
    const mod = await import("../parallel-monitor-overlay.js");
    // Module should export the class
    assert.ok(mod.ParallelMonitorOverlay, "ParallelMonitorOverlay class should be exported");
  });

  it("ParallelMonitorOverlay can be instantiated with mock tui", async () => {
    const mod = await import("../parallel-monitor-overlay.js");

    let renderRequested = false;
    const mockTui = { requestRender: () => { renderRequested = true; } };
    const mockTheme = {
      fg: (_color: string, text: string) => text,
      bold: (text: string) => text,
    };
    let closed = false;

    const overlay = new mod.ParallelMonitorOverlay(
      mockTui,
      mockTheme as any,
      () => { closed = true; },
      "/nonexistent/path", // basePath — no real data, tests empty state
    );

    // Should render without throwing
    const lines = overlay.render(80);
    assert.ok(Array.isArray(lines), "render should return an array");
    assert.ok(lines.length > 0, "render should return at least one line");

    // Should contain header text
    const joined = lines.join("\n");
    assert.ok(joined.includes("Parallel Monitor"), "should include title");
    assert.ok(joined.includes("No parallel workers found"), "should show empty state");

    // Dispose should not throw
    overlay.dispose();

    // handleInput with ESC should call onClose
    const overlay2 = new mod.ParallelMonitorOverlay(
      mockTui,
      mockTheme as any,
      () => { closed = true; },
      "/nonexistent/path",
    );
    overlay2.handleInput("q");
    assert.ok(closed, "pressing q should trigger onClose");
    overlay2.dispose();
  });
});
@@ -0,0 +1,226 @@
/**
 * parallel-worker-lock-contention.test.ts — Regression tests for #2184.
 *
 * Covers all four bugs from the parallel worker contention issue:
 * Bug 1: Session lock contention — per-milestone lock isolation
 * Bug 2: Budget ceiling scoped to current session for parallel workers
 * Bug 3: syncProjectRootToWorktree skips when source === destination (symlinks)
 * Bug 4: createMilestoneWorktree copies planning artifacts
 *
 * Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
 */

import {
  mkdtempSync,
  mkdirSync,
  writeFileSync,
  rmSync,
  existsSync,
  symlinkSync,
  readFileSync,
} from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

import {
  acquireSessionLock,
  releaseSessionLock,
  effectiveLockFile,
  effectiveLockTarget,
} from "../session-lock.ts";
import { gsdRoot } from "../paths.ts";
import {
  syncProjectRootToWorktree,
  syncStateToProjectRoot,
} from "../auto-worktree.ts";
import { writeLock, readCrashLock, clearLock } from "../crash-recovery.ts";
import { describe, test, beforeEach, afterEach } from "node:test";
import assert from "node:assert/strict";

// ─── Bug 1: Per-milestone lock isolation ──────────────────────────────────────

describe("parallel-worker-lock-contention (#2184)", () => {
  // Save and restore env vars between tests
  const savedEnv: Record<string, string | undefined> = {};

  beforeEach(() => {
    savedEnv.GSD_PARALLEL_WORKER = process.env.GSD_PARALLEL_WORKER;
    savedEnv.GSD_MILESTONE_LOCK = process.env.GSD_MILESTONE_LOCK;
  });

  afterEach(() => {
    if (savedEnv.GSD_PARALLEL_WORKER === undefined) {
      delete process.env.GSD_PARALLEL_WORKER;
    } else {
      process.env.GSD_PARALLEL_WORKER = savedEnv.GSD_PARALLEL_WORKER;
    }
    if (savedEnv.GSD_MILESTONE_LOCK === undefined) {
      delete process.env.GSD_MILESTONE_LOCK;
    } else {
      process.env.GSD_MILESTONE_LOCK = savedEnv.GSD_MILESTONE_LOCK;
    }
  });

  // ─── Bug 1a: effectiveLockFile returns per-milestone name ────────────────
  test("Bug 1a: effectiveLockFile returns auto.lock without parallel env", () => {
    delete process.env.GSD_PARALLEL_WORKER;
    delete process.env.GSD_MILESTONE_LOCK;
    assert.equal(effectiveLockFile(), "auto.lock");
  });

  test("Bug 1a: effectiveLockFile returns auto-<MID>.lock in parallel mode", () => {
    process.env.GSD_PARALLEL_WORKER = "1";
    process.env.GSD_MILESTONE_LOCK = "M003";
    assert.equal(effectiveLockFile(), "auto-M003.lock");
  });

  // ─── Bug 1b: effectiveLockTarget returns per-milestone directory ─────────
  test("Bug 1b: effectiveLockTarget returns gsdDir without parallel env", () => {
    delete process.env.GSD_PARALLEL_WORKER;
    const gsdDir = "/tmp/test/.gsd";
    assert.equal(effectiveLockTarget(gsdDir), gsdDir);
  });

  test("Bug 1b: effectiveLockTarget returns parallel/<MID> in parallel mode", () => {
    process.env.GSD_PARALLEL_WORKER = "1";
    process.env.GSD_MILESTONE_LOCK = "M003";
    const gsdDir = "/tmp/test/.gsd";
    assert.equal(effectiveLockTarget(gsdDir), join(gsdDir, "parallel", "M003"));
  });

  // ─── Bug 1c: Two parallel workers acquire independent locks ──────────────
  test("Bug 1c: parallel workers use per-milestone lock files, not shared auto.lock", () => {
    const base = mkdtempSync(join(tmpdir(), "gsd-parallel-lock-"));
    mkdirSync(join(base, ".gsd"), { recursive: true });

    try {
      // Simulate worker for M001
      process.env.GSD_PARALLEL_WORKER = "1";
      process.env.GSD_MILESTONE_LOCK = "M001";

      const r1 = acquireSessionLock(base);
      assert.ok(r1.acquired, "M001 worker acquires lock");

      // Verify the lock file is per-milestone
      const gsdDir = gsdRoot(base);
      const m001LockFile = join(gsdDir, "auto-M001.lock");
      assert.ok(existsSync(m001LockFile), "auto-M001.lock exists");

      // The shared auto.lock should NOT exist
      const sharedLockFile = join(gsdDir, "auto.lock");
      assert.ok(!existsSync(sharedLockFile), "shared auto.lock does NOT exist");

      // The per-milestone lock target directory should exist
      const m001LockTarget = join(gsdDir, "parallel", "M001");
      assert.ok(existsSync(m001LockTarget), "parallel/M001 directory exists");

      releaseSessionLock(base);

      // After release, per-milestone lock file should be cleaned
      assert.ok(!existsSync(m001LockFile), "auto-M001.lock cleaned after release");
    } finally {
      delete process.env.GSD_PARALLEL_WORKER;
      delete process.env.GSD_MILESTONE_LOCK;
      rmSync(base, { recursive: true, force: true });
    }
  });

  // ─── Bug 1d: crash-recovery uses per-milestone lock file ─────────────────
  test("Bug 1d: crash-recovery writeLock/readCrashLock uses per-milestone lock in parallel mode", () => {
    const base = mkdtempSync(join(tmpdir(), "gsd-parallel-crash-"));
    mkdirSync(join(base, ".gsd"), { recursive: true });

    try {
      process.env.GSD_PARALLEL_WORKER = "1";
      process.env.GSD_MILESTONE_LOCK = "M002";

      writeLock(base, "execute-task", "M002/S01/T01");

      const gsdDir = gsdRoot(base);
      const lockFile = join(gsdDir, "auto-M002.lock");
      assert.ok(existsSync(lockFile), "crash-recovery writes auto-M002.lock");

      const data = readCrashLock(base);
      assert.ok(data !== null, "readCrashLock reads per-milestone lock");
      assert.equal(data!.unitId, "M002/S01/T01");

      clearLock(base);
      assert.ok(!existsSync(lockFile), "clearLock removes per-milestone lock");
    } finally {
      delete process.env.GSD_PARALLEL_WORKER;
      delete process.env.GSD_MILESTONE_LOCK;
      rmSync(base, { recursive: true, force: true });
    }
  });

  // ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ───────────
  test("Bug 3: syncProjectRootToWorktree skips when .gsd resolves to same path (symlink)", () => {
    const base = mkdtempSync(join(tmpdir(), "gsd-symlink-sync-"));
    const externalGsd = join(base, "external-gsd");
    const projectRoot = join(base, "project");
    const worktreePath = join(base, "worktree");

    mkdirSync(externalGsd, { recursive: true });
    mkdirSync(projectRoot, { recursive: true });
    mkdirSync(worktreePath, { recursive: true });

    // Create the external state directory with a milestone
    mkdirSync(join(externalGsd, "milestones", "M001"), { recursive: true });
    writeFileSync(
      join(externalGsd, "milestones", "M001", "M001-ROADMAP.md"),
      "# Roadmap",
    );

    // Symlink both project and worktree .gsd to the same external directory
    symlinkSync(externalGsd, join(projectRoot, ".gsd"));
    symlinkSync(externalGsd, join(worktreePath, ".gsd"));

    try {
      // This should NOT throw ERR_FS_CP_EINVAL — it should skip silently
      let threw = false;
      try {
        syncProjectRootToWorktree(projectRoot, worktreePath, "M001");
      } catch {
        threw = true;
      }
      assert.ok(!threw, "syncProjectRootToWorktree does not throw on same-path symlink");

      // Same for reverse direction
      threw = false;
      try {
        syncStateToProjectRoot(worktreePath, projectRoot, "M001");
      } catch {
        threw = true;
      }
      assert.ok(!threw, "syncStateToProjectRoot does not throw on same-path symlink");
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  });

  // ─── Bug 3b: sync still works when paths are different ───────────────────
  test("Bug 3b: syncProjectRootToWorktree copies when .gsd paths are different", () => {
    const base = mkdtempSync(join(tmpdir(), "gsd-diff-sync-"));
    const projectRoot = join(base, "project");
    const worktreePath = join(base, "worktree");

    mkdirSync(join(projectRoot, ".gsd", "milestones", "M001"), { recursive: true });
    mkdirSync(join(worktreePath, ".gsd", "milestones"), { recursive: true });

    writeFileSync(
      join(projectRoot, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
      "# Roadmap content",
    );

    try {
      syncProjectRootToWorktree(projectRoot, worktreePath, "M001");

      // The roadmap should have been copied
      const copied = join(worktreePath, ".gsd", "milestones", "M001", "M001-ROADMAP.md");
      assert.ok(existsSync(copied), "milestone roadmap copied to worktree");
      assert.equal(readFileSync(copied, "utf-8"), "# Roadmap content");
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  });
});
@@ -0,0 +1,48 @@
import { describe, test } from "node:test";
import assert from "node:assert/strict";
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

import { buildPlanMilestonePrompt } from "../auto-prompts.ts";

function createBase(): string {
  const base = mkdtempSync(join(tmpdir(), "gsd-plan-queue-"));
  mkdirSync(join(base, ".gsd", "milestones", "M010"), { recursive: true });
  return base;
}

function cleanup(base: string): void {
  rmSync(base, { recursive: true, force: true });
}

describe("plan-milestone queue context", () => {
  test("includes queue brief when planning milestone without roadmap context", async () => {
    const base = createBase();
    try {
      writeFileSync(
        join(base, ".gsd", "QUEUE.md"),
        [
          "# Queue",
          "",
          "### M010: Analytics Dashboard — Interactivity, Intelligence & Demo Readiness",
          "**Vision:** Ship a polished analytics dashboard with drilldowns and AI assistance.",
          "",
          "## Scope",
          "- Interactivity",
          "- Intelligence",
          "- Demo readiness",
          "",
        ].join("\n"),
      );

      const prompt = await buildPlanMilestonePrompt("M010", "M010", base);

      assert.match(prompt, /Source: `\.gsd\/QUEUE\.md`/);
      assert.match(prompt, /Analytics Dashboard — Interactivity, Intelligence & Demo Readiness/);
      assert.match(prompt, /Ship a polished analytics dashboard/);
    } finally {
      cleanup(base);
    }
  });
});
@@ -1,17 +1,19 @@
/**
 * Regression tests for #2684: preferences.md must be included in both
 * ROOT_STATE_FILES (sync) and copyPlanningArtifacts (initial seed).
 * Regression tests for #2684 plus uppercase-preference normalization:
 * preferences files are handled explicitly
 * outside ROOT_STATE_FILES and prefer canonical PREFERENCES.md over the
 * legacy lowercase fallback.
 *
 * Without this, post_unit_hooks and all preference-driven config silently
 * stop working inside auto-mode worktrees.
 */
import { test } from "node:test";
import assert from "node:assert/strict";
import { readFileSync, mkdtempSync, mkdirSync, writeFileSync, existsSync, rmSync } from "node:fs";
import { readFileSync, mkdtempSync, mkdirSync, writeFileSync, existsSync, readdirSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";

test("#2684: preferences.md is NOT in ROOT_STATE_FILES (forward-only sync)", () => {
test("#2684: preferences files are NOT in ROOT_STATE_FILES (forward-only sync)", () => {
  const srcPath = join(import.meta.dirname, "..", "auto-worktree.ts");
  const src = readFileSync(srcPath, "utf-8");

@@ -22,21 +24,23 @@ test("#2684: preferences.md is NOT in ROOT_STATE_FILES (forward-only sync)", ()
  const arrayEnd = src.indexOf("] as const", arrayStart);
  const block = src.slice(arrayStart, arrayEnd);

  // preferences.md must NOT be in ROOT_STATE_FILES — it is handled separately
  // Project preferences must NOT be in ROOT_STATE_FILES — they are handled separately
  // in syncGsdStateToWorktree() (forward-only, additive). Including it in
  // ROOT_STATE_FILES would cause syncWorktreeStateBack() to overwrite the
  // authoritative project root copy (#2684).
  const entries = block.split("\n")
    .map(l => l.trim())
    .filter(l => l.startsWith('"') && l.includes(".md"));
  const hasPrefs = entries.some(l => l.includes("preferences.md"));
  const hasPrefs = entries.some(
    l => l.includes("PREFERENCES.md") || l.includes("preferences.md"),
  );
  assert.ok(
    !hasPrefs,
    "preferences.md must NOT be in ROOT_STATE_FILES (back-sync would overwrite root)",
    "preferences files must NOT be in ROOT_STATE_FILES (back-sync would overwrite root)",
  );
});

test("#2684: copyPlanningArtifacts file list includes preferences.md", () => {
test("copyPlanningArtifacts prefers canonical PREFERENCES.md with lowercase fallback", () => {
  const srcPath = join(import.meta.dirname, "..", "auto-worktree.ts");
  const src = readFileSync(srcPath, "utf-8");
@@ -45,15 +49,15 @@ test("#2684: copyPlanningArtifacts file list includes preferences.md", () => {
  assert.ok(fnIdx !== -1, "copyPlanningArtifacts function exists");

  // Extract function body (up to the next top-level function)
  const fnBody = src.slice(fnIdx, fnIdx + 1500);
  const fnBody = src.slice(fnIdx, fnIdx + 2200);

  assert.ok(
    fnBody.includes('"preferences.md"'),
    "preferences.md should be in copyPlanningArtifacts file list",
    fnBody.includes("PROJECT_PREFERENCES_FILE") && fnBody.includes("LEGACY_PROJECT_PREFERENCES_FILE"),
    "copyPlanningArtifacts should prefer canonical PREFERENCES.md and retain lowercase fallback via the shared constants",
  );
});

test("#2684: syncGsdStateToWorktree copies preferences.md", async () => {
test("syncGsdStateToWorktree copies canonical PREFERENCES.md", async () => {
  // Functional test: create a mock source and destination, call the sync
  const srcBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-src-"));
  const dstBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-dst-"));

@@ -63,9 +67,9 @@ test("#2684: syncGsdStateToWorktree copies preferences.md", async () => {
  mkdirSync(dstGsd, { recursive: true });

  try {
    // Write a preferences.md in source
    // Write a canonical PREFERENCES.md in source
    writeFileSync(
      join(srcGsd, "preferences.md"),
      join(srcGsd, "PREFERENCES.md"),
      "---\nversion: 1\n---\n\npost_unit_hooks:\n - name: notify\n command: echo done\n",
    );
@@ -73,16 +77,54 @@ test("#2684: syncGsdStateToWorktree copies preferences.md", async () => {
    const { syncGsdStateToWorktree } = await import("../auto-worktree.ts");
    syncGsdStateToWorktree(srcBase, dstBase);

    // Verify preferences.md was copied
    // Verify PREFERENCES.md was copied
    assert.ok(
      existsSync(join(dstGsd, "preferences.md")),
      "preferences.md should be copied to worktree",
      existsSync(join(dstGsd, "PREFERENCES.md")),
      "PREFERENCES.md should be copied to worktree",
    );

    const content = readFileSync(join(dstGsd, "preferences.md"), "utf-8");
    const content = readFileSync(join(dstGsd, "PREFERENCES.md"), "utf-8");
    assert.ok(
      content.includes("post_unit_hooks"),
      "copied preferences.md should contain the hooks config",
      "copied PREFERENCES.md should contain the hooks config",
    );
  } finally {
    rmSync(srcBase, { recursive: true, force: true });
    rmSync(dstBase, { recursive: true, force: true });
  }
});

test("syncGsdStateToWorktree falls back to legacy lowercase preferences.md", async () => {
  const srcBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-legacy-src-"));
  const dstBase = mkdtempSync(join(tmpdir(), "gsd-wt-prefs-legacy-dst-"));
  const srcGsd = join(srcBase, ".gsd");
  const dstGsd = join(dstBase, ".gsd");
  mkdirSync(srcGsd, { recursive: true });
  mkdirSync(dstGsd, { recursive: true });

  try {
    writeFileSync(
      join(srcGsd, "preferences.md"),
      "---\nversion: 1\n---\n\ngit:\n auto_push: true\n",
    );

    const { syncGsdStateToWorktree } = await import("../auto-worktree.ts");
    const result = syncGsdStateToWorktree(srcBase, dstBase);

    const copiedEntries = readdirSync(dstGsd)
      .filter((name) => name === "PREFERENCES.md" || name === "preferences.md");

    assert.ok(
      copiedEntries.length === 1,
      `expected exactly one preferences file in worktree, got ${copiedEntries.join(", ") || "(none)"}`,
    );
    assert.ok(
      copiedEntries[0] === "PREFERENCES.md" || copiedEntries[0] === "preferences.md",
      "legacy fallback should still result in one readable preferences file",
    );
    assert.ok(
      result.synced.includes("preferences.md") || result.synced.includes("PREFERENCES.md"),
      "legacy fallback copy should be reported in synced list",
    );
  } finally {
    rmSync(srcBase, { recursive: true, force: true });
@@ -12,6 +12,7 @@ import { join, dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { classifyError, isTransient, isTransientNetworkError } from "../error-classifier.ts";
import { pauseAutoForProviderError } from "../provider-error-pause.ts";
import { resumeAutoAfterProviderDelay } from "../bootstrap/provider-error-resume.ts";
import { getNextFallbackModel } from "../preferences.ts";

const __dirname = dirname(fileURLToPath(import.meta.url));
@@ -268,6 +269,90 @@ test("pauseAutoForProviderError falls back to indefinite pause when not rate lim
  ]);
});

// ── resumeAutoAfterProviderDelay ────────────────────────────────────────────

test("resumeAutoAfterProviderDelay restarts paused auto-mode from the recorded base path", async () => {
  const startCalls: Array<{ base: string; verboseMode: boolean; step?: boolean }> = [];
  const result = await resumeAutoAfterProviderDelay(
    {} as any,
    { ui: { notify() {} } } as any,
    {
      getSnapshot: () => ({
        active: false,
        paused: true,
        stepMode: true,
        basePath: "/tmp/project",
      }),
      startAuto: async (_ctx, _pi, base, verboseMode, options) => {
        startCalls.push({ base, verboseMode, step: options?.step });
      },
    },
  );

  assert.equal(result, "resumed");
  assert.deepEqual(startCalls, [
    { base: "/tmp/project", verboseMode: false, step: true },
  ]);
});

test("resumeAutoAfterProviderDelay does not double-start when auto-mode is already active", async () => {
  let startCalls = 0;
  const result = await resumeAutoAfterProviderDelay(
    {} as any,
    { ui: { notify() {} } } as any,
    {
      getSnapshot: () => ({
        active: true,
        paused: false,
        stepMode: false,
        basePath: "/tmp/project",
      }),
      startAuto: async () => {
        startCalls += 1;
      },
    },
  );

  assert.equal(result, "already-active");
  assert.equal(startCalls, 0);
});

test("resumeAutoAfterProviderDelay leaves auto paused when no base path is available", async () => {
  const notifications: Array<{ message: string; level: string }> = [];
  let startCalls = 0;

  const result = await resumeAutoAfterProviderDelay(
    {} as any,
    {
      ui: {
        notify(message: string, level?: string) {
          notifications.push({ message, level: level ?? "info" });
        },
      },
    } as any,
    {
      getSnapshot: () => ({
        active: false,
        paused: true,
        stepMode: false,
        basePath: "",
      }),
      startAuto: async () => {
        startCalls += 1;
      },
    },
  );

  assert.equal(result, "missing-base");
  assert.equal(startCalls, 0);
  assert.deepEqual(notifications, [
    {
      message: "Provider error recovery delay elapsed, but no paused auto-mode base path was available. Leaving auto-mode paused.",
      level: "warning",
    },
  ]);
});

// ── Escalating backoff for transient errors (#1166) ─────────────────────────

test("agent-end-recovery.ts tracks consecutive transient errors for escalating backoff", () => {
@@ -303,6 +388,19 @@ test("agent-end-recovery.ts applies escalating delay for repeated transient erro
  );
});

test("agent-end-recovery.ts resumes transient provider pauses through startAuto instead of a hidden prompt", () => {
  const src = readFileSync(join(__dirname, "..", "bootstrap", "agent-end-recovery.ts"), "utf-8");

  assert.ok(
    src.includes("resumeAutoAfterProviderDelay"),
    "agent-end-recovery.ts must resume paused auto-mode through resumeAutoAfterProviderDelay (#2813)",
  );
  assert.ok(
    !src.includes('Continue execution — provider error recovery delay elapsed.'),
    "transient provider resume must not rely on a hidden continue prompt (#2813)",
  );
});

// ── Codex error extraction (#1166) ──────────────────────────────────────────

test("openai-codex-responses.ts extracts nested error fields", () => {
@@ -0,0 +1,59 @@
import test from "node:test";
import assert from "node:assert/strict";

import { handleRecoverableExtensionProcessError } from "../bootstrap/register-extension.ts";

test("handleRecoverableExtensionProcessError swallows spawn ENOENT", () => {
  let stderr = "";
  const originalWrite = process.stderr.write.bind(process.stderr);
  process.stderr.write = ((chunk: string | Uint8Array) => {
    stderr += String(chunk);
    return true;
  }) as typeof process.stderr.write;

  try {
    const handled = handleRecoverableExtensionProcessError(
      Object.assign(new Error("missing binary"), {
        code: "ENOENT",
        syscall: "spawn npm",
        path: "npm",
      }),
    );
    assert.equal(handled, true);
    assert.match(stderr, /spawn ENOENT: npm/);
  } finally {
    process.stderr.write = originalWrite;
  }
});

test("handleRecoverableExtensionProcessError swallows uv_cwd ENOENT", () => {
  let stderr = "";
  const originalWrite = process.stderr.write.bind(process.stderr);
  process.stderr.write = ((chunk: string | Uint8Array) => {
    stderr += String(chunk);
    return true;
  }) as typeof process.stderr.write;

  try {
    const handled = handleRecoverableExtensionProcessError(
      Object.assign(new Error("process.cwd failed"), {
        code: "ENOENT",
        syscall: "uv_cwd",
      }),
    );
    assert.equal(handled, true);
    assert.match(stderr, /ENOENT \(uv_cwd\): process\.cwd failed/);
  } finally {
    process.stderr.write = originalWrite;
  }
});

test("handleRecoverableExtensionProcessError leaves unrelated errors unhandled", () => {
  const handled = handleRecoverableExtensionProcessError(
    Object.assign(new Error("permission denied"), {
      code: "EPERM",
      syscall: "open",
    }),
  );
  assert.equal(handled, false);
});
@@ -1,11 +1,12 @@
/**
 * worktree-preferences-sync.test.ts — Regression test for #2684.
 *
 * Verifies that preferences.md is seeded into auto-mode worktrees:
 * Verifies that canonical PREFERENCES.md is seeded into auto-mode worktrees,
 * while legacy lowercase preferences.md remains supported:
 *
 * 1. copyPlanningArtifacts() copies preferences.md on initial worktree creation
 * 2. syncGsdStateToWorktree() forward-syncs preferences.md (additive only)
 * 3. syncWorktreeStateBack() does NOT overwrite project root preferences.md
 * 1. syncGsdStateToWorktree() forward-syncs PREFERENCES.md (additive only)
 * 2. syncGsdStateToWorktree() still accepts legacy lowercase preferences.md
 * 3. syncWorktreeStateBack() does NOT overwrite project root PREFERENCES.md
 */

import test from "node:test";

@@ -15,6 +16,7 @@ import {
  mkdirSync,
  mkdtempSync,
  readFileSync,
  readdirSync,
  rmSync,
  writeFileSync,
} from "node:fs";
@@ -56,35 +58,58 @@ const PREFS_CONTENT = [
  ' - use: "frontend-design"',
].join("\n");

test("#2684: syncGsdStateToWorktree forward-syncs preferences.md when missing from worktree", (t) => {
test("#2684: syncGsdStateToWorktree forward-syncs PREFERENCES.md when missing from worktree", (t) => {
  const mainBase = makeTempDir("main");
  const wtBase = makeTempDir("wt");
  t.after(() => cleanup(mainBase, wtBase));

  // Project root has preferences.md
  writeFile(mainBase, ".gsd/preferences.md", PREFS_CONTENT);
  // Project root has canonical PREFERENCES.md
  writeFile(mainBase, ".gsd/PREFERENCES.md", PREFS_CONTENT);

  // Worktree has .gsd/ but no preferences.md
  // Worktree has .gsd/ but no preferences file
  mkdirSync(join(wtBase, ".gsd"), { recursive: true });

  const result = syncGsdStateToWorktree(mainBase, wtBase);

  assert.ok(
    existsSync(join(wtBase, ".gsd", "preferences.md")),
    "preferences.md should be copied to worktree",
    existsSync(join(wtBase, ".gsd", "PREFERENCES.md")),
    "PREFERENCES.md should be copied to worktree",
  );
  assert.equal(
    readFileSync(join(wtBase, ".gsd", "preferences.md"), "utf-8"),
    readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"),
    PREFS_CONTENT,
    "preferences.md content should match source",
    "PREFERENCES.md content should match source",
  );
  assert.ok(
    result.synced.includes("preferences.md"),
    "preferences.md should appear in synced list",
    result.synced.includes("PREFERENCES.md"),
    "PREFERENCES.md should appear in synced list",
  );
});

test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree preferences.md", (t) => {
test("syncGsdStateToWorktree still accepts legacy lowercase preferences.md", (t) => {
  const mainBase = makeTempDir("main");
  const wtBase = makeTempDir("wt");
  t.after(() => cleanup(mainBase, wtBase));

  writeFile(mainBase, ".gsd/preferences.md", PREFS_CONTENT);
  mkdirSync(join(wtBase, ".gsd"), { recursive: true });

  const result = syncGsdStateToWorktree(mainBase, wtBase);

  const copiedEntries = readdirSync(join(wtBase, ".gsd"))
    .filter((name) => name === "PREFERENCES.md" || name === "preferences.md");

  assert.ok(
    copiedEntries.length === 1,
    `expected exactly one preferences file in worktree, got ${copiedEntries.join(", ") || "(none)"}`,
  );
  assert.ok(
    result.synced.includes("preferences.md") || result.synced.includes("PREFERENCES.md"),
    "legacy source should still appear in synced list",
  );
});

test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree preferences file", (t) => {
  const mainBase = makeTempDir("main");
  const wtBase = makeTempDir("wt");
  t.after(() => cleanup(mainBase, wtBase));
@ -92,19 +117,19 @@ test("#2684: syncGsdStateToWorktree does NOT overwrite existing worktree prefere
|
|||
const rootPrefs = "# Root preferences\nold: true";
|
||||
const wtPrefs = "# Worktree preferences\nmodified: true";
|
||||
|
||||
writeFile(mainBase, ".gsd/preferences.md", rootPrefs);
|
||||
writeFile(wtBase, ".gsd/preferences.md", wtPrefs);
|
||||
writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs);
|
||||
writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs);
|
||||
|
||||
syncGsdStateToWorktree(mainBase, wtBase);
|
||||
|
||||
assert.equal(
|
||||
readFileSync(join(wtBase, ".gsd", "preferences.md"), "utf-8"),
|
||||
readFileSync(join(wtBase, ".gsd", "PREFERENCES.md"), "utf-8"),
|
||||
wtPrefs,
|
||||
"existing worktree preferences.md must not be overwritten",
|
||||
"existing worktree PREFERENCES.md must not be overwritten",
|
||||
);
|
||||
});
|
||||
|
||||
test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.md", (t) => {
|
||||
test("#2684: syncWorktreeStateBack does NOT overwrite project root PREFERENCES.md", (t) => {
|
||||
const mainBase = makeTempDir("main");
|
||||
const wtBase = makeTempDir("wt");
|
||||
const mid = "M001";
|
||||
|
|
@ -113,8 +138,8 @@ test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.m
|
|||
const rootPrefs = "# Root preferences\nauthoritative: true";
|
||||
const wtPrefs = "# Worktree preferences\nstale-copy: true";
|
||||
|
||||
writeFile(mainBase, ".gsd/preferences.md", rootPrefs);
|
||||
writeFile(wtBase, ".gsd/preferences.md", wtPrefs);
|
||||
writeFile(mainBase, ".gsd/PREFERENCES.md", rootPrefs);
|
||||
writeFile(wtBase, ".gsd/PREFERENCES.md", wtPrefs);
|
||||
|
||||
// Worktree needs at least a milestone dir for the function to proceed
|
||||
mkdirSync(join(wtBase, ".gsd", "milestones", mid), { recursive: true });
|
||||
|
|
@ -123,8 +148,8 @@ test("#2684: syncWorktreeStateBack does NOT overwrite project root preferences.m
|
|||
syncWorktreeStateBack(mainBase, wtBase, mid);
|
||||
|
||||
assert.equal(
|
||||
readFileSync(join(mainBase, ".gsd", "preferences.md"), "utf-8"),
|
||||
readFileSync(join(mainBase, ".gsd", "PREFERENCES.md"), "utf-8"),
|
||||
rootPrefs,
|
||||
"project root preferences.md must NOT be overwritten by worktree copy",
|
||||
"project root PREFERENCES.md must NOT be overwritten by worktree copy",
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
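Taken together, the hunks above assert three behaviours: the worktree copy is written as PREFERENCES.md, a legacy lowercase preferences.md source is still accepted but never duplicated, and an existing preferences file on either side is never overwritten. Below is a minimal sketch of a source-side lookup that would satisfy those assertions — the helper name and its direct use of node:fs are assumptions for illustration, not code from this commit.

import { existsSync, copyFileSync } from "node:fs";
import { join } from "node:path";

// Hypothetical helper: pick whichever preferences file exists at the source,
// always write the canonical uppercase name, and never clobber the destination.
function syncPreferencesSketch(mainGsdDir: string, worktreeGsdDir: string): string | null {
  const source = ["PREFERENCES.md", "preferences.md"]
    .map((name) => join(mainGsdDir, name))
    .find((path) => existsSync(path));
  if (!source) return null;                                   // nothing to sync

  const target = join(worktreeGsdDir, "PREFERENCES.md");
  if (existsSync(target) || existsSync(join(worktreeGsdDir, "preferences.md"))) {
    return null;                                              // an existing worktree copy wins
  }
  copyFileSync(source, target);
  return "PREFERENCES.md";                                    // name reported in the synced list
}
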
@@ -14,7 +14,10 @@ import {
  renderHealthView,
  type ProgressFilter,
} from "./visualizer-views.js";
+import { writeFileSync, mkdirSync } from "node:fs";
+import { join } from "node:path";
import { writeExportFile } from "./export.js";
+import { gsdRoot } from "./paths.js";
import { stripAnsi } from "../shared/mod.js";

const TAB_COUNT = 10;

@@ -350,9 +353,6 @@ export class GSDVisualizerOverlay {
    // Capture current active tab's rendered lines as snapshot
    const snapshotLines = this.renderTabContent(this.activeTab, 80);
    const timestamp = new Date().toISOString().replace(/[:.]/g, "-").slice(0, 19);
-   const { writeFileSync, mkdirSync } = require("node:fs");
-   const { join } = require("node:path");
-   const { gsdRoot } = require("./paths.js");
    const exportDir = gsdRoot(this.basePath);
    mkdirSync(exportDir, { recursive: true });
    const outPath = join(exportDir, `snapshot-${timestamp}.txt`);

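This hunk pairs with the import hunk above: the inline require() calls are CommonJS-only and are not defined inside an ES-module build, so the same bindings now come from top-level import statements. Presumably this also lets the bundled/compiled output resolve writeFileSync, mkdirSync, join, and gsdRoot statically instead of at call time.
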
@@ -11,7 +11,7 @@

/** Format a millisecond duration as a compact human-readable string. */
export function formatDuration(ms: number): string {
- if (ms < 1000) return `${ms}ms`;
+ if (ms > 0 && ms < 1000) return `${ms}ms`;
  const s = Math.floor(ms / 1000);
  if (s < 60) return `${s}s`;
  const m = Math.floor(s / 60);

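The only change is the guard on the millisecond branch. Assuming the function body continues exactly as shown above, the observable difference is confined to non-positive inputs — two illustrative calls (not taken from the test suite):

formatDuration(250); // "250ms" — positive sub-second values are unchanged
formatDuration(0);   // "0s"    — 0 now skips the ms branch and falls through to the seconds branch
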
@@ -54,9 +54,10 @@ export function updateWorker(id: string, status: "completed" | "failed"): void {
  if (entry) {
    entry.status = status;
    // Remove after a brief display window (5 seconds)
+   // unref() so the timer doesn't keep the process alive in test environments
    setTimeout(() => {
      activeWorkers.delete(id);
-   }, 5000);
+   }, 5000).unref();
  }
}

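An unref'd timer no longer counts toward keeping the Node event loop alive, so the 5-second cleanup cannot hang a test runner that has otherwise finished; the trade-off is that the entry may never be deleted if the process exits first, which is harmless for an in-memory display cache. A standalone illustration (not from this diff):

// The process can exit as soon as other work is done, even though this callback is still scheduled.
const pending = setTimeout(() => console.log("cleanup ran"), 5_000);
pending.unref();
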
@@ -187,6 +187,19 @@ test("loader MIN_NODE_MAJOR matches package.json engines field", () => {
    `loader MIN_NODE_MAJOR (${loaderMin}) must match package.json engines.node (>=${engineMin}.0.0)`);
});

+test("cli.ts lets gsd update bypass the managed-resource mismatch gate", () => {
+  const cliSrc = readFileSync(join(projectRoot, "src", "cli.ts"), "utf-8");
+  const updateBranchIndex = cliSrc.indexOf("if (cliFlags.messages[0] === 'update')")
+  const mismatchGateIndex = cliSrc.indexOf("exitIfManagedResourcesAreNewer(agentDir)")
+
+  assert.ok(updateBranchIndex !== -1, "cli.ts contains an update branch")
+  assert.ok(mismatchGateIndex !== -1, "cli.ts contains the managed-resource mismatch gate")
+  assert.ok(
+    updateBranchIndex < mismatchGateIndex,
+    "gsd update must run before the managed-resource mismatch gate",
+  )
+});

// ═══════════════════════════════════════════════════════════════════════════
// 3. resource-loader syncs bundled resources
// ═══════════════════════════════════════════════════════════════════════════

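The new test is a source-order check: the update branch must appear before the mismatch gate in cli.ts. A minimal sketch of the arrangement it enforces follows — only the two quoted snippets come from the test itself; runUpdate, the declare stubs, and the surrounding structure are assumptions:

// Hypothetical shape of cli.ts, for illustration only.
declare const cliFlags: { messages: string[] };
declare const agentDir: string;
declare function runUpdate(): Promise<void>;
declare function exitIfManagedResourcesAreNewer(dir: string): void;

async function dispatch(): Promise<void> {
  if (cliFlags.messages[0] === 'update') {
    await runUpdate();                         // update is handled before the gate…
    return;
  }
  exitIfManagedResourcesAreNewer(agentDir);    // …so a version mismatch can no longer block updating
  // …remaining CLI dispatch
}
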
@@ -1,7 +1,10 @@
import test from "node:test";
import assert from "node:assert/strict";

-import { resolveBgShellPersistenceCwd } from "../resources/extensions/bg-shell/utilities.ts";
+import {
+  getBgShellLiveCwd,
+  resolveBgShellPersistenceCwd,
+} from "../resources/extensions/bg-shell/utilities.ts";

test("keeps non-worktree cwd unchanged", () => {
  const cached = "/repo";

@@ -43,3 +46,18 @@ test("keeps current auto-worktree cwd when it still matches process cwd", () =>
    cached,
  );
});

+test("falls back to project root when process.cwd throws inside a stale auto-worktree", () => {
+  const cached = "/repo/.gsd/worktrees/M001";
+  const live = getBgShellLiveCwd(
+    cached,
+    (path) => path === "/repo",
+    () => {
+      throw Object.assign(new Error("uv_cwd"), { code: "ENOENT", syscall: "uv_cwd" });
+    },
+    () => {},
+  );
+
+  assert.equal(live, "/repo");
+  assert.equal(resolveBgShellPersistenceCwd(cached, live, (path) => path === "/repo"), "/repo");
+});

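The call site above pins down the contract but not the implementation. The sketch below shows one fallback that would satisfy the new test — it is NOT the shipped code; parameter roles are inferred from the test, and the fourth argument is treated here as an optional logger.

// Sketch only: recover a usable cwd when the cached auto-worktree has been deleted.
function getBgShellLiveCwdSketch(
  cached: string,
  pathExists: (p: string) => boolean,
  getCwd: () => string = () => process.cwd(),
  log: (msg: string) => void = () => {},
): string {
  try {
    return getCwd();
  } catch {
    // process.cwd() raises ENOENT/uv_cwd once the worktree directory is gone:
    // fall back to the project root encoded in the cached ".gsd/worktrees/<id>" path.
    const root = cached.split("/.gsd/worktrees/")[0];
    if (root !== cached && pathExists(root)) {
      log(`stale worktree cwd, falling back to ${root}`);
      return root;
    }
    return cached;
  }
}
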
@@ -1,11 +1,9 @@
import test from "node:test";
import assert from "node:assert/strict";
import { readFileSync, existsSync } from "node:fs";
-import { resolve, dirname } from "node:path";
-import { fileURLToPath } from "node:url";
+import { resolve } from "node:path";

-const __dirname = dirname(fileURLToPath(import.meta.url));
-const root = resolve(__dirname, "../..");
+const root = process.cwd();

function readFile(relativePath: string): string {
  const full = resolve(root, relativePath);

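Resolving root from process.cwd() rather than from the compiled file's location keeps path resolution stable now that unit tests are compiled into dist-test (see the esbuild change in 2.56.0); the implicit assumption is that the test runner is always invoked from the repository root.
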
Some files were not shown because too many files have changed in this diff.