Merge branch 'main' into feat-google-oauth-search
This commit is contained in:
commit
4f9125daa6
25 changed files with 1566 additions and 65 deletions
58
CHANGELOG.md
58
CHANGELOG.md
|
|
@ -6,6 +6,57 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
|||
|
||||
## [Unreleased]
|
||||
|
||||
## [2.14.4] - 2026-03-15
|
||||
|
||||
### Fixed
|
||||
- **Session cwd update** — `newSession()` now updates the LLM's perceived working directory to reflect `process.chdir()` into auto-worktrees. Previously the system prompt was frozen at the original project root, causing the LLM to `cd` back and write files to the wrong location. This was the root cause of complete-slice and plan-slice loops in worktree-based projects.
|
||||
|
||||
## [2.14.3] - 2026-03-15
|
||||
|
||||
### Fixed
|
||||
- **Copy planning artifacts into new auto-worktrees** — `createAutoWorktree` now copies `.gsd/milestones/`, `DECISIONS.md`, `REQUIREMENTS.md`, `PROJECT.md` from the source repo into the worktree. Prevents plan-slice loops in projects with pre-v2.14.0 `.gitignore`.
|
||||
|
||||
## [2.14.2] - 2026-03-15
|
||||
|
||||
### Fixed
|
||||
- **Dispatch reentrancy deadlock** — `_dispatching` flag was never reset after first dispatch, permanently blocking all subsequent unit dispatches. Wrapped in try/finally.
|
||||
- **`.gitignore` self-heal** — existing projects with blanket `.gsd/` ignore now auto-remove it on next auto-mode start, replacing with explicit runtime-only patterns so planning artifacts are tracked in git.
|
||||
- **Discuss depth verification** — render summary as chat text (markdown renders), use ask_user_questions for short confirmation only.
|
||||
|
||||
## [2.14.1] - 2026-03-15
|
||||
|
||||
### Fixed
|
||||
- **Quiet auto-mode warnings** — internal recovery machinery (dispatch gap watchdog, model fallback chain) downgraded to verbose-only. Users only see warnings when action is needed.
|
||||
- **Dispatch recovery hardening** — artifact fallback when completion key missing, TUI freeze prevention, reentrancy guard, atomic writes, stale runtime record cleanup
|
||||
|
||||
## [2.14.0] - 2026-03-15
|
||||
|
||||
### Added
|
||||
- **Discussion manifest** — mechanical process verification for multi-milestone context discussions
|
||||
- **Session-internal `/gsd config`** — configure GSD settings within a running session
|
||||
- **Model selection UI** — select list instead of free-text input for model preferences
|
||||
- **Startup performance** — faster GSD launch via optimized initialization
|
||||
|
||||
### Changed
|
||||
- **Branchless worktree architecture** — eliminated slice branches entirely. All work commits sequentially on `milestone/<MID>` within auto-mode worktrees. No branch creation, switching, or merging within a worktree. ~2600 lines of merge/conflict/branch-switching code removed.
|
||||
- **`.gitignore` overhaul** — planning artifacts (`.gsd/milestones/`) are tracked in git naturally. Only runtime files are gitignored. No more force-add hacks.
|
||||
- **Multi-milestone enforcement** — `depends_on` frontmatter enforced in multi-milestone CONTEXT.md
|
||||
|
||||
### Fixed
|
||||
- **Auto-mode loop detection failures** — artifacts on wrong branch or invisible after branch switch no longer possible (root cause eliminated by branchless architecture)
|
||||
- **Nested worktree creation** — auto-mode no longer creates worktrees inside existing manual worktrees, preventing wrong-repo state reads and "All milestones complete" false positives
|
||||
- **Dispatch recovery hardening** — artifact fallback when completion key missing, TUI freeze prevention on cascading skips, reentrancy guard, atomic writes, stale runtime record cleanup, git index.lock cleanup
|
||||
- **Hook orchestration** — finalize runtime records, add supervision, fix retry
|
||||
- **Empty slice plan stays in planning** — no longer incorrectly transitions to summarizing
|
||||
- **Prefs wizard** — launch directly from `/gsd prefs`, fix parse/serialize cycle for empty arrays
|
||||
- **Discussion routing** — `/gsd discuss` routes to draft when phase is needs-discussion
|
||||
|
||||
### Removed
|
||||
- `ensureSliceBranch()`, `switchToMain()`, `mergeSliceToMain()`, `mergeSliceToMilestone()`
|
||||
- `shouldUseWorktreeIsolation()`, `getMergeToMainMode()`, `buildFixMergePrompt()`
|
||||
- `withMergeHeal()`, `recoverCheckout()`, `fix-merge` unit type
|
||||
- `git.isolation` and `git.merge_to_main` preferences (deprecated with warnings)
|
||||
|
||||
## [2.13.1] - 2026-03-15
|
||||
|
||||
### Fixed
|
||||
|
|
@ -607,7 +658,12 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
|||
### Changed
|
||||
- License updated to MIT
|
||||
|
||||
[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.13.1...HEAD
|
||||
[Unreleased]: https://github.com/gsd-build/gsd-2/compare/v2.14.4...HEAD
|
||||
[2.14.4]: https://github.com/gsd-build/gsd-2/compare/v2.14.3...v2.14.4
|
||||
[2.14.3]: https://github.com/gsd-build/gsd-2/compare/v2.14.2...v2.14.3
|
||||
[2.14.2]: https://github.com/gsd-build/gsd-2/compare/v2.14.1...v2.14.2
|
||||
[2.14.1]: https://github.com/gsd-build/gsd-2/compare/v2.14.0...v2.14.1
|
||||
[2.14.0]: https://github.com/gsd-build/gsd-2/compare/v2.13.1...v2.14.0
|
||||
[2.13.1]: https://github.com/gsd-build/gsd-2/compare/v2.13.0...v2.13.1
|
||||
[2.13.0]: https://github.com/gsd-build/gsd-2/compare/v2.12.0...v2.13.0
|
||||
[2.12.0]: https://github.com/gsd-build/gsd-2/compare/v2.11.1...v2.12.0
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ GSD v2 solves all of these because it's not a prompt framework anymore — it's
|
|||
| Context management | Hope the LLM doesn't fill up | Fresh session per task, programmatic |
|
||||
| Auto mode | LLM self-loop | State machine reading `.gsd/` files |
|
||||
| Crash recovery | None | Lock files + session forensics |
|
||||
| Git strategy | LLM writes git commands | Programmatic branch-per-slice, squash merge |
|
||||
| Git strategy | LLM writes git commands | Worktree isolation, sequential commits, squash merge |
|
||||
| Cost tracking | None | Per-unit token/cost ledger with dashboard |
|
||||
| Stuck detection | None | Retry once, then stop with diagnostics |
|
||||
| Timeout supervision | None | Soft/idle/hard timeouts with recovery steering |
|
||||
|
|
@ -111,7 +111,7 @@ Auto mode is a state machine driven by files on disk. It reads `.gsd/STATE.md`,
|
|||
|
||||
2. **Context pre-loading** — The dispatch prompt includes inlined task plans, slice plans, prior task summaries, dependency summaries, roadmap excerpts, and decisions register. The LLM starts with everything it needs instead of spending tool calls reading files.
|
||||
|
||||
3. **Git branch-per-slice** — Each slice gets its own branch (`gsd/M001/S01`). Tasks commit atomically on the branch. When the slice completes, it's squash-merged to main (or whichever branch you started from) as one clean commit.
|
||||
3. **Git worktree isolation** — Each milestone runs in its own git worktree with a `milestone/<MID>` branch. All slice work commits sequentially — no branch switching, no merge conflicts. When the milestone completes, it's squash-merged to main as one clean commit.
|
||||
|
||||
4. **Crash recovery** — A lock file tracks the current unit. If the session dies, the next `/gsd auto` reads the surviving session file, synthesizes a recovery briefing from every tool call that made it to disk, and resumes with full context.
|
||||
|
||||
|
|
@ -268,7 +268,7 @@ gsd/M001/S01 (deleted after merge):
|
|||
feat(S01/T01): core types and interfaces
|
||||
```
|
||||
|
||||
One commit per slice on main (or whichever branch you started from). Squash commits are the permanent record — branches are deleted after merge. Git bisect works. Individual slices are revertable.
|
||||
One squash commit per milestone on main (or whichever branch you started from). The worktree is torn down after merge. Git bisect works. Individual milestones are revertable.
|
||||
|
||||
### Verification
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-darwin-arm64",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD native engine binary for macOS ARM64",
|
||||
"os": [
|
||||
"darwin"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-darwin-x64",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD native engine binary for macOS Intel",
|
||||
"os": [
|
||||
"darwin"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-linux-arm64-gnu",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD native engine binary for Linux ARM64 (glibc)",
|
||||
"os": [
|
||||
"linux"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-linux-x64-gnu",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD native engine binary for Linux x64 (glibc)",
|
||||
"os": [
|
||||
"linux"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-win32-x64-msvc",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD native engine binary for Windows x64 (MSVC)",
|
||||
"os": [
|
||||
"win32"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "gsd-pi",
|
||||
"version": "2.13.1",
|
||||
"version": "2.14.4",
|
||||
"description": "GSD — Get Shit Done coding agent",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
|
|
|
|||
|
|
@ -1354,6 +1354,9 @@ export class AgentSession {
|
|||
this._disconnectFromAgent();
|
||||
await this.abort();
|
||||
this.agent.reset();
|
||||
// Update cwd to current process directory — auto-mode may have chdir'd
|
||||
// into a worktree since the original session was created.
|
||||
this._cwd = process.cwd();
|
||||
this.sessionManager.newSession({ parentSession: options?.parentSession });
|
||||
this.agent.sessionId = this.sessionManager.getSessionId();
|
||||
this._steeringMessages = [];
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
* manages create, enter, detect, and teardown for auto-mode worktrees.
|
||||
*/
|
||||
|
||||
import { existsSync, readFileSync, realpathSync, utimesSync } from "node:fs";
|
||||
import { existsSync, cpSync, readFileSync, realpathSync, utimesSync } from "node:fs";
|
||||
import { join, resolve } from "node:path";
|
||||
import { execSync, execFileSync } from "node:child_process";
|
||||
import {
|
||||
|
|
@ -90,6 +90,14 @@ export function autoWorktreeBranch(milestoneId: string): string {
|
|||
export function createAutoWorktree(basePath: string, milestoneId: string): string {
|
||||
const branch = autoWorktreeBranch(milestoneId);
|
||||
const info = createWorktree(basePath, milestoneId, { branch });
|
||||
|
||||
// Copy .gsd/ planning artifacts from the source repo into the new worktree.
|
||||
// Worktrees are fresh git checkouts — untracked files don't carry over.
|
||||
// Planning artifacts may be untracked if the project's .gitignore had a
|
||||
// blanket .gsd/ rule (pre-v2.14.0). Without this copy, auto-mode loops
|
||||
// on plan-slice because the plan file doesn't exist in the worktree.
|
||||
copyPlanningArtifacts(basePath, info.path);
|
||||
|
||||
const previousCwd = process.cwd();
|
||||
|
||||
try {
|
||||
|
|
@ -107,6 +115,36 @@ export function createAutoWorktree(basePath: string, milestoneId: string): strin
|
|||
return info.path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy .gsd/ planning artifacts from source repo to a new worktree.
|
||||
* Copies milestones/, DECISIONS.md, REQUIREMENTS.md, PROJECT.md, QUEUE.md.
|
||||
* Skips runtime files (auto.lock, metrics.json, etc.) and the worktrees/ dir.
|
||||
* Best-effort — failures are non-fatal since auto-mode can recreate artifacts.
|
||||
*/
|
||||
function copyPlanningArtifacts(srcBase: string, wtPath: string): void {
|
||||
const srcGsd = join(srcBase, ".gsd");
|
||||
const dstGsd = join(wtPath, ".gsd");
|
||||
if (!existsSync(srcGsd)) return;
|
||||
|
||||
// Copy milestones/ directory (planning files, roadmaps, plans, research)
|
||||
const srcMilestones = join(srcGsd, "milestones");
|
||||
if (existsSync(srcMilestones)) {
|
||||
try {
|
||||
cpSync(srcMilestones, join(dstGsd, "milestones"), { recursive: true, force: true });
|
||||
} catch { /* non-fatal */ }
|
||||
}
|
||||
|
||||
// Copy top-level planning files
|
||||
for (const file of ["DECISIONS.md", "REQUIREMENTS.md", "PROJECT.md", "QUEUE.md"]) {
|
||||
const src = join(srcGsd, file);
|
||||
if (existsSync(src)) {
|
||||
try {
|
||||
cpSync(src, join(dstGsd, file), { force: true });
|
||||
} catch { /* non-fatal */ }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Teardown an auto-worktree: chdir back to original base, then remove
|
||||
* the worktree and its branch.
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ import type {
|
|||
} from "@gsd/pi-coding-agent";
|
||||
|
||||
import { deriveState, invalidateStateCache } from "./state.js";
|
||||
import type { GSDState } from "./types.js";
|
||||
import type { BudgetEnforcementMode, GSDState } from "./types.js";
|
||||
import { loadFile, parseContinue, parsePlan, parseRoadmap, parseSummary, extractUatType, inlinePriorMilestoneSummary, getManifestStatus, clearParseCache } from "./files.js";
|
||||
export { inlinePriorMilestoneSummary };
|
||||
import type { UatType } from "./files.js";
|
||||
|
|
@ -42,6 +42,7 @@ import {
|
|||
writeUnitRuntimeRecord,
|
||||
} from "./unit-runtime.js";
|
||||
import { resolveAutoSupervisorConfig, resolveModelForUnit, resolveModelWithFallbacksForUnit, resolveSkillDiscoveryMode, loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
import { sendDesktopNotification } from "./notifications.js";
|
||||
import type { GSDPreferences } from "./preferences.js";
|
||||
import {
|
||||
checkPostUnitHooks,
|
||||
|
|
@ -69,11 +70,13 @@ import {
|
|||
getProjectTotals, formatCost, formatTokenCount,
|
||||
} from "./metrics.js";
|
||||
import { dirname, join } from "node:path";
|
||||
import { readdirSync, readFileSync, existsSync, mkdirSync, writeFileSync, unlinkSync } from "node:fs";
|
||||
import { sep as pathSep } from "node:path";
|
||||
import { readdirSync, readFileSync, existsSync, mkdirSync, writeFileSync, unlinkSync, renameSync, statSync } from "node:fs";
|
||||
import { execSync, execFileSync } from "node:child_process";
|
||||
import {
|
||||
autoCommitCurrentBranch,
|
||||
captureIntegrationBranch,
|
||||
detectWorktreeName,
|
||||
getCurrentBranch,
|
||||
getMainBranch,
|
||||
MergeConflictError,
|
||||
|
|
@ -115,7 +118,10 @@ function persistCompletedKey(base: string, key: string): void {
|
|||
} catch { /* corrupt file — start fresh */ }
|
||||
if (!keys.includes(key)) {
|
||||
keys.push(key);
|
||||
writeFileSync(file, JSON.stringify(keys), "utf-8");
|
||||
// Atomic write: tmp file + rename prevents partial writes on crash
|
||||
const tmpFile = file + ".tmp";
|
||||
writeFileSync(tmpFile, JSON.stringify(keys), "utf-8");
|
||||
renameSync(tmpFile, file);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -181,6 +187,7 @@ let currentUnit: { type: string; id: string; startedAt: number } | null = null;
|
|||
|
||||
/** Track current milestone to detect transitions */
|
||||
let currentMilestoneId: string | null = null;
|
||||
let lastBudgetAlertLevel: BudgetAlertLevel = 0;
|
||||
|
||||
/** Model the user had selected before auto-mode started */
|
||||
let originalModelId: string | null = null;
|
||||
|
|
@ -202,6 +209,31 @@ const DISPATCH_GAP_TIMEOUT_MS = 5_000; // 5 seconds
|
|||
/** SIGTERM handler registered while auto-mode is active — cleared on stop/pause. */
|
||||
let _sigtermHandler: (() => void) | null = null;
|
||||
|
||||
type BudgetAlertLevel = 0 | 75 | 90 | 100;
|
||||
|
||||
export function getBudgetAlertLevel(budgetPct: number): BudgetAlertLevel {
|
||||
if (budgetPct >= 1.0) return 100;
|
||||
if (budgetPct >= 0.90) return 90;
|
||||
if (budgetPct >= 0.75) return 75;
|
||||
return 0;
|
||||
}
|
||||
|
||||
export function getNewBudgetAlertLevel(previousLevel: BudgetAlertLevel, budgetPct: number): BudgetAlertLevel | null {
|
||||
const currentLevel = getBudgetAlertLevel(budgetPct);
|
||||
if (currentLevel === 0 || currentLevel <= previousLevel) return null;
|
||||
return currentLevel;
|
||||
}
|
||||
|
||||
export function getBudgetEnforcementAction(
|
||||
enforcement: BudgetEnforcementMode,
|
||||
budgetPct: number,
|
||||
): "none" | "warn" | "pause" | "halt" {
|
||||
if (budgetPct < 1.0) return "none";
|
||||
if (enforcement === "halt") return "halt";
|
||||
if (enforcement === "pause") return "pause";
|
||||
return "warn";
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a SIGTERM handler that clears the lock file and exits cleanly.
|
||||
* Captures the active base path at registration time so the handler
|
||||
|
|
@ -330,10 +362,12 @@ function startDispatchGapWatchdog(ctx: ExtensionContext, pi: ExtensionAPI): void
|
|||
|
||||
// Auto-mode is active but no unit was dispatched — the state machine stalled.
|
||||
// Re-derive state and attempt a fresh dispatch.
|
||||
ctx.ui.notify(
|
||||
"Dispatch gap detected — no unit dispatched after previous unit completed. Re-evaluating state.",
|
||||
"warning",
|
||||
);
|
||||
if (verbose) {
|
||||
ctx.ui.notify(
|
||||
"Dispatch gap detected — re-evaluating state.",
|
||||
"info",
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
await dispatchNextUnit(ctx, pi);
|
||||
|
|
@ -353,6 +387,8 @@ export async function stopAuto(ctx?: ExtensionContext, pi?: ExtensionAPI): Promi
|
|||
clearUnitTimeout();
|
||||
if (basePath) clearLock(basePath);
|
||||
clearSkillSnapshot();
|
||||
_dispatching = false;
|
||||
_skipDepth = 0;
|
||||
|
||||
// Remove SIGTERM handler registered at auto-mode start
|
||||
deregisterSigtermHandler();
|
||||
|
|
@ -401,6 +437,7 @@ export async function stopAuto(ctx?: ExtensionContext, pi?: ExtensionAPI): Promi
|
|||
stepMode = false;
|
||||
unitDispatchCount.clear();
|
||||
unitRecoveryCount.clear();
|
||||
lastBudgetAlertLevel = 0;
|
||||
unitLifetimeDispatches.clear();
|
||||
currentUnit = null;
|
||||
currentMilestoneId = null;
|
||||
|
|
@ -461,17 +498,35 @@ async function selfHealRuntimeRecords(base: string, ctx: ExtensionContext): Prom
|
|||
const { listUnitRuntimeRecords } = await import("./unit-runtime.js");
|
||||
const records = listUnitRuntimeRecords(base);
|
||||
let healed = 0;
|
||||
const STALE_THRESHOLD_MS = 60 * 60 * 1000; // 1 hour
|
||||
const now = Date.now();
|
||||
for (const record of records) {
|
||||
const { unitType, unitId } = record;
|
||||
const artifactPath = resolveExpectedArtifactPath(unitType, unitId, base);
|
||||
|
||||
// Case 1: Artifact exists — unit completed but closeout didn't finish
|
||||
if (artifactPath && existsSync(artifactPath)) {
|
||||
// Artifact exists — unit completed but closeout didn't finish.
|
||||
clearUnitRuntimeRecord(base, unitType, unitId);
|
||||
// Also persist completion key if missing
|
||||
const key = `${unitType}/${unitId}`;
|
||||
if (!completedKeySet.has(key)) {
|
||||
persistCompletedKey(base, key);
|
||||
completedKeySet.add(key);
|
||||
}
|
||||
healed++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Case 2: No artifact but record is stale (dispatched > 1h ago, process crashed)
|
||||
const age = now - (record.startedAt ?? 0);
|
||||
if (record.phase === "dispatched" && age > STALE_THRESHOLD_MS) {
|
||||
clearUnitRuntimeRecord(base, unitType, unitId);
|
||||
healed++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (healed > 0) {
|
||||
ctx.ui.notify(`Self-heal: cleared ${healed} stale runtime record(s) with completed artifacts.`, "info");
|
||||
ctx.ui.notify(`Self-heal: cleared ${healed} stale runtime record(s).`, "info");
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — self-heal should never block auto-mode start
|
||||
|
|
@ -505,7 +560,8 @@ export async function startAuto(
|
|||
if (currentMilestoneId) setActiveMilestoneId(base, currentMilestoneId);
|
||||
|
||||
// ── Auto-worktree: re-enter worktree on resume if not already inside ──
|
||||
if (currentMilestoneId && originalBasePath && !isInAutoWorktree(basePath)) {
|
||||
// Skip if already inside a worktree (manual /worktree) to prevent nesting.
|
||||
if (currentMilestoneId && originalBasePath && !isInAutoWorktree(basePath) && !detectWorktreeName(basePath) && !detectWorktreeName(originalBasePath)) {
|
||||
try {
|
||||
const existingWtPath = getAutoWorktreePath(originalBasePath, currentMilestoneId);
|
||||
if (existingWtPath) {
|
||||
|
|
@ -642,6 +698,7 @@ export async function startAuto(
|
|||
basePath = base;
|
||||
unitDispatchCount.clear();
|
||||
unitRecoveryCount.clear();
|
||||
lastBudgetAlertLevel = 0;
|
||||
unitLifetimeDispatches.clear();
|
||||
completedKeySet.clear();
|
||||
loadPersistedKeys(base, completedKeySet);
|
||||
|
|
@ -668,8 +725,22 @@ export async function startAuto(
|
|||
|
||||
// ── Auto-worktree: create or enter worktree for the active milestone ──
|
||||
// Store the original project root before any chdir so we can restore on stop.
|
||||
// Skip if already inside a worktree (manual /worktree or another auto-worktree)
|
||||
// to prevent nested worktree creation.
|
||||
originalBasePath = base;
|
||||
if (currentMilestoneId) {
|
||||
|
||||
const isUnderGsdWorktrees = (p: string): boolean => {
|
||||
// Prevent creating nested auto-worktrees when running from within any
|
||||
// `.gsd/worktrees/...` directory (including manual worktrees).
|
||||
const marker = `${pathSep}.gsd${pathSep}worktrees${pathSep}`;
|
||||
if (p.includes(marker)) {
|
||||
return true;
|
||||
}
|
||||
const worktreesSuffix = `${pathSep}.gsd${pathSep}worktrees`;
|
||||
return p.endsWith(worktreesSuffix);
|
||||
};
|
||||
|
||||
if (currentMilestoneId && !detectWorktreeName(base) && !isUnderGsdWorktrees(base)) {
|
||||
try {
|
||||
const existingWtPath = getAutoWorktreePath(base, currentMilestoneId);
|
||||
if (existingWtPath) {
|
||||
|
|
@ -738,6 +809,43 @@ export async function startAuto(
|
|||
// Self-heal: clear stale runtime records where artifacts already exist
|
||||
await selfHealRuntimeRecords(base, ctx);
|
||||
|
||||
// Self-heal: remove stale .git/index.lock from prior crash.
|
||||
// A stale lock file blocks all git operations (commit, merge, checkout).
|
||||
// Only remove if older than 60 seconds (not from a concurrent process).
|
||||
try {
|
||||
const gitLockFile = join(base, ".git", "index.lock");
|
||||
if (existsSync(gitLockFile)) {
|
||||
const lockAge = Date.now() - statSync(gitLockFile).mtimeMs;
|
||||
if (lockAge > 60_000) {
|
||||
unlinkSync(gitLockFile);
|
||||
ctx.ui.notify("Removed stale .git/index.lock from prior crash.", "info");
|
||||
}
|
||||
}
|
||||
} catch { /* non-fatal */ }
|
||||
|
||||
// Pre-flight: validate milestone queue for multi-milestone runs.
|
||||
// Warn about issues that will cause auto-mode to pause or block.
|
||||
try {
|
||||
const msDir = join(base, ".gsd", "milestones");
|
||||
if (existsSync(msDir)) {
|
||||
const milestoneIds = readdirSync(msDir, { withFileTypes: true })
|
||||
.filter(d => d.isDirectory() && /^M\d{3}/.test(d.name))
|
||||
.map(d => d.name.match(/^(M\d{3})/)?.[1] ?? d.name);
|
||||
if (milestoneIds.length > 1) {
|
||||
const issues: string[] = [];
|
||||
for (const id of milestoneIds) {
|
||||
const draft = resolveMilestoneFile(base, id, "CONTEXT-DRAFT");
|
||||
if (draft) issues.push(`${id}: has CONTEXT-DRAFT.md (will pause for discussion)`);
|
||||
}
|
||||
if (issues.length > 0) {
|
||||
ctx.ui.notify(`Pre-flight: ${milestoneIds.length} milestones queued.\n${issues.map(i => ` ⚠ ${i}`).join("\n")}`, "warning");
|
||||
} else {
|
||||
ctx.ui.notify(`Pre-flight: ${milestoneIds.length} milestones queued. All have full context.`, "info");
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch { /* non-fatal — pre-flight should never block auto-mode */ }
|
||||
|
||||
// Dispatch the first unit
|
||||
await dispatchNextUnit(ctx, pi);
|
||||
}
|
||||
|
|
@ -1414,17 +1522,43 @@ function getRoadmapSlicesSync(): { done: number; total: number; activeSliceTasks
|
|||
|
||||
// ─── Core Loop ────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Tracks recursive skip depth to prevent TUI freeze on cascading completed-unit skips */
|
||||
let _skipDepth = 0;
|
||||
const MAX_SKIP_DEPTH = 20;
|
||||
|
||||
/** Reentrancy guard for dispatchNextUnit itself (not just handleAgentEnd).
|
||||
* Prevents concurrent dispatch from watchdog timers, step wizard, and direct calls
|
||||
* that bypass the _handlingAgentEnd guard. Recursive calls (from skip paths) are
|
||||
* allowed via _skipDepth > 0. */
|
||||
let _dispatching = false;
|
||||
|
||||
async function dispatchNextUnit(
|
||||
ctx: ExtensionContext,
|
||||
pi: ExtensionAPI,
|
||||
): Promise<void> {
|
||||
if (!active || !cmdCtx) {
|
||||
if (active && !cmdCtx) {
|
||||
ctx.ui.notify("Auto-mode dispatch failed: no command context. Run /gsd auto to restart.", "error");
|
||||
ctx.ui.notify("Auto-mode session expired. Run /gsd auto to restart.", "info");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Reentrancy guard: allow recursive calls from skip paths (_skipDepth > 0)
|
||||
// but block concurrent external calls (watchdog, step wizard, etc.)
|
||||
if (_dispatching && _skipDepth === 0) {
|
||||
return; // Another dispatch is in progress — bail silently
|
||||
}
|
||||
_dispatching = true;
|
||||
try {
|
||||
// Recursion depth guard: when many units are skipped in sequence (e.g., after
|
||||
// crash recovery with 10+ completed units), recursive dispatchNextUnit calls
|
||||
// can freeze the TUI or overflow the stack. Yield generously after MAX_SKIP_DEPTH.
|
||||
if (_skipDepth > MAX_SKIP_DEPTH) {
|
||||
_skipDepth = 0;
|
||||
ctx.ui.notify(`Skipped ${MAX_SKIP_DEPTH}+ completed units. Yielding to UI before continuing.`, "info");
|
||||
await new Promise(r => setTimeout(r, 200));
|
||||
}
|
||||
|
||||
// Clear stale directory listing cache so deriveState sees fresh disk state (#431)
|
||||
clearPathCache();
|
||||
// Clear parsed roadmap/plan cache — doctor may have re-populated it with
|
||||
|
|
@ -1441,6 +1575,7 @@ async function dispatchNextUnit(
|
|||
`Milestone ${currentMilestoneId} complete. Advancing to ${mid}: ${midTitle}.`,
|
||||
"info",
|
||||
);
|
||||
sendDesktopNotification("GSD", `Milestone ${currentMilestoneId} complete!`, "success", "milestone");
|
||||
// Reset stuck detection for new milestone
|
||||
unitDispatchCount.clear();
|
||||
unitRecoveryCount.clear();
|
||||
|
|
@ -1460,6 +1595,7 @@ async function dispatchNextUnit(
|
|||
snapshotUnitMetrics(ctx, currentUnit.type, currentUnit.id, currentUnit.startedAt, modelId);
|
||||
saveActivityLog(ctx, basePath, currentUnit.type, currentUnit.id);
|
||||
}
|
||||
sendDesktopNotification("GSD", "All milestones complete!", "success", "milestone");
|
||||
await stopAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
|
|
@ -1541,7 +1677,6 @@ async function dispatchNextUnit(
|
|||
if (existsSync(file)) writeFileSync(file, JSON.stringify([]), "utf-8");
|
||||
completedKeySet.clear();
|
||||
} catch { /* non-fatal */ }
|
||||
|
||||
// ── Milestone merge: squash-merge milestone branch to main before stopping ──
|
||||
if (currentMilestoneId && isInAutoWorktree(basePath) && originalBasePath) {
|
||||
try {
|
||||
|
|
@ -1561,7 +1696,7 @@ async function dispatchNextUnit(
|
|||
);
|
||||
}
|
||||
}
|
||||
|
||||
sendDesktopNotification("GSD", `Milestone ${mid} complete!`, "success", "milestone");
|
||||
await stopAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
|
|
@ -1573,7 +1708,9 @@ async function dispatchNextUnit(
|
|||
saveActivityLog(ctx, basePath, currentUnit.type, currentUnit.id);
|
||||
}
|
||||
await stopAuto(ctx, pi);
|
||||
ctx.ui.notify(`Blocked: ${state.blockers.join(", ")}. Fix and run /gsd auto.`, "warning");
|
||||
const blockerMsg = `Blocked: ${state.blockers.join(", ")}`;
|
||||
ctx.ui.notify(`${blockerMsg}. Fix and run /gsd auto.`, "warning");
|
||||
sendDesktopNotification("GSD", blockerMsg, "error", "attention");
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -1581,16 +1718,58 @@ async function dispatchNextUnit(
|
|||
// Ensures the UAT file and slice summary are both on main when UAT runs.
|
||||
const prefs = loadEffectiveGSDPreferences()?.preferences;
|
||||
|
||||
// Budget ceiling guard — pause before starting next unit if ceiling is hit
|
||||
// Budget ceiling guard — enforce budget with configurable action
|
||||
const budgetCeiling = prefs?.budget_ceiling;
|
||||
if (budgetCeiling !== undefined) {
|
||||
if (budgetCeiling !== undefined && budgetCeiling > 0) {
|
||||
const currentLedger = getLedger();
|
||||
const totalCost = currentLedger ? getProjectTotals(currentLedger.units).cost : 0;
|
||||
if (totalCost >= budgetCeiling) {
|
||||
ctx.ui.notify(
|
||||
`Budget ceiling ${formatCost(budgetCeiling)} reached (spent ${formatCost(totalCost)}). Pausing auto-mode — /gsd auto to continue.`,
|
||||
"warning",
|
||||
);
|
||||
const budgetPct = totalCost / budgetCeiling;
|
||||
const budgetAlertLevel = getBudgetAlertLevel(budgetPct);
|
||||
const newBudgetAlertLevel = getNewBudgetAlertLevel(lastBudgetAlertLevel, budgetPct);
|
||||
const enforcement = prefs?.budget_enforcement ?? "pause";
|
||||
|
||||
const budgetEnforcementAction = getBudgetEnforcementAction(enforcement, budgetPct);
|
||||
|
||||
if (newBudgetAlertLevel === 100 && budgetEnforcementAction !== "none") {
|
||||
const msg = `Budget ceiling ${formatCost(budgetCeiling)} reached (spent ${formatCost(totalCost)}).`;
|
||||
lastBudgetAlertLevel = newBudgetAlertLevel;
|
||||
if (budgetEnforcementAction === "halt") {
|
||||
ctx.ui.notify(`${msg} Stopping auto-mode.`, "error");
|
||||
sendDesktopNotification("GSD", msg, "error", "budget");
|
||||
await stopAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
if (budgetEnforcementAction === "pause") {
|
||||
ctx.ui.notify(`${msg} Pausing auto-mode — /gsd auto to override and continue.`, "warning");
|
||||
sendDesktopNotification("GSD", msg, "warning", "budget");
|
||||
await pauseAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
ctx.ui.notify(`${msg} Continuing (enforcement: warn).`, "warning");
|
||||
sendDesktopNotification("GSD", msg, "warning", "budget");
|
||||
} else if (newBudgetAlertLevel === 90) {
|
||||
lastBudgetAlertLevel = newBudgetAlertLevel;
|
||||
ctx.ui.notify(`Budget 90%: ${formatCost(totalCost)} / ${formatCost(budgetCeiling)}`, "warning");
|
||||
sendDesktopNotification("GSD", `Budget 90%: ${formatCost(totalCost)} / ${formatCost(budgetCeiling)}`, "warning", "budget");
|
||||
} else if (newBudgetAlertLevel === 75) {
|
||||
lastBudgetAlertLevel = newBudgetAlertLevel;
|
||||
ctx.ui.notify(`Budget 75%: ${formatCost(totalCost)} / ${formatCost(budgetCeiling)}`, "info");
|
||||
sendDesktopNotification("GSD", `Budget 75%: ${formatCost(totalCost)} / ${formatCost(budgetCeiling)}`, "info", "budget");
|
||||
} else if (budgetAlertLevel === 0) {
|
||||
lastBudgetAlertLevel = 0;
|
||||
}
|
||||
} else {
|
||||
lastBudgetAlertLevel = 0;
|
||||
}
|
||||
|
||||
// Context window guard — pause if approaching context limits
|
||||
const contextThreshold = prefs?.context_pause_threshold ?? 0; // 0 = disabled by default
|
||||
if (contextThreshold > 0 && cmdCtx) {
|
||||
const contextUsage = cmdCtx.getContextUsage();
|
||||
if (contextUsage && contextUsage.percent >= contextThreshold) {
|
||||
const msg = `Context window at ${contextUsage.percent}% (threshold: ${contextThreshold}%). Pausing to prevent truncated output.`;
|
||||
ctx.ui.notify(`${msg} Run /gsd auto to continue (will start fresh session).`, "warning");
|
||||
sendDesktopNotification("GSD", `Context ${contextUsage.percent}% — paused`, "warning", "attention");
|
||||
await pauseAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
|
|
@ -1758,7 +1937,7 @@ async function dispatchNextUnit(
|
|||
saveActivityLog(ctx, basePath, currentUnit.type, currentUnit.id);
|
||||
}
|
||||
await stopAuto(ctx, pi);
|
||||
ctx.ui.notify(`Unexpected phase: ${state.phase}. Stopping auto-mode.`, "warning");
|
||||
ctx.ui.notify(`Unhandled phase "${state.phase}" — run /gsd doctor to diagnose.`, "info");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
|
@ -1804,10 +1983,10 @@ async function dispatchNextUnit(
|
|||
`Skipping ${unitType} ${unitId} — already completed in a prior session. Advancing.`,
|
||||
"info",
|
||||
);
|
||||
// Yield to the event loop before re-dispatching to avoid tight recursion
|
||||
// when many units are already completed (e.g., after crash recovery).
|
||||
await new Promise(r => setImmediate(r));
|
||||
_skipDepth++;
|
||||
await new Promise(r => setTimeout(r, 50));
|
||||
await dispatchNextUnit(ctx, pi);
|
||||
_skipDepth = Math.max(0, _skipDepth - 1);
|
||||
return;
|
||||
} else {
|
||||
// Stale completion record — artifact missing. Remove and re-run.
|
||||
|
|
@ -1820,6 +1999,26 @@ async function dispatchNextUnit(
|
|||
}
|
||||
}
|
||||
|
||||
// Fallback: if the idempotency key is missing but the expected artifact already
|
||||
// exists on disk, the task completed in a prior session without persisting the key.
|
||||
// Persist it now and skip re-dispatch. This prevents infinite loops where a task
|
||||
// completes successfully but the completion key was never written (e.g., completed
|
||||
// on the first attempt before hitting the retry-threshold persistence logic).
|
||||
if (verifyExpectedArtifact(unitType, unitId, basePath)) {
|
||||
persistCompletedKey(basePath, idempotencyKey);
|
||||
completedKeySet.add(idempotencyKey);
|
||||
invalidateStateCache();
|
||||
ctx.ui.notify(
|
||||
`Skipping ${unitType} ${unitId} — artifact exists but completion key was missing. Repaired and advancing.`,
|
||||
"info",
|
||||
);
|
||||
_skipDepth++;
|
||||
await new Promise(r => setTimeout(r, 50));
|
||||
await dispatchNextUnit(ctx, pi);
|
||||
_skipDepth = Math.max(0, _skipDepth - 1);
|
||||
return;
|
||||
}
|
||||
|
||||
// Stuck detection — tracks total dispatches per unit (not just consecutive repeats).
|
||||
// Pattern A→B→A→B would reset retryCount every time; this map catches it.
|
||||
const dispatchKey = `${unitType}/${unitId}`;
|
||||
|
|
@ -1907,9 +2106,33 @@ async function dispatchNextUnit(
|
|||
return;
|
||||
}
|
||||
|
||||
// Last resort for complete-milestone: generate stub summary to unblock pipeline.
|
||||
// All slices are done (otherwise we wouldn't be in completing-milestone phase),
|
||||
// but the LLM failed to write the summary N times. A stub lets the pipeline advance.
|
||||
if (unitType === "complete-milestone") {
|
||||
try {
|
||||
const mPath = resolveMilestonePath(basePath, unitId);
|
||||
if (mPath) {
|
||||
const stubPath = join(mPath, `${unitId}-SUMMARY.md`);
|
||||
if (!existsSync(stubPath)) {
|
||||
writeFileSync(stubPath, `# ${unitId} Summary\n\nAuto-generated stub — milestone tasks completed but summary generation failed after ${prevCount + 1} attempts.\nReview and replace this stub with a proper summary.\n`);
|
||||
ctx.ui.notify(`Generated stub summary for ${unitId} to unblock pipeline. Review later.`, "warning");
|
||||
persistCompletedKey(basePath, dispatchKey);
|
||||
completedKeySet.add(dispatchKey);
|
||||
unitDispatchCount.delete(dispatchKey);
|
||||
invalidateStateCache();
|
||||
await new Promise(r => setImmediate(r));
|
||||
await dispatchNextUnit(ctx, pi);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch { /* non-fatal — fall through to normal stop */ }
|
||||
}
|
||||
|
||||
const expected = diagnoseExpectedArtifact(unitType, unitId, basePath);
|
||||
const remediation = buildLoopRemediationSteps(unitType, unitId, basePath);
|
||||
await stopAuto(ctx, pi);
|
||||
sendDesktopNotification("GSD", `Loop detected: ${unitType} ${unitId}`, "error", "error");
|
||||
ctx.ui.notify(
|
||||
`Loop detected: ${unitType} ${unitId} dispatched ${prevCount + 1} times total. Expected artifact not found.${expected ? `\n Expected: ${expected}` : ""}${remediation ? `\n\n Remediation steps:\n${remediation}` : "\n Check branch state and .gsd/ artifacts."}`,
|
||||
"error",
|
||||
|
|
@ -2035,7 +2258,7 @@ async function dispatchNextUnit(
|
|||
const result = await cmdCtx!.newSession();
|
||||
if (result.cancelled) {
|
||||
await stopAuto(ctx, pi);
|
||||
ctx.ui.notify("New session cancelled — auto-mode stopped.", "warning");
|
||||
ctx.ui.notify("Auto-mode stopped.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -2141,7 +2364,7 @@ async function dispatchNextUnit(
|
|||
}
|
||||
}
|
||||
if (!model) {
|
||||
ctx.ui.notify(`Model ${modelId} not found in available models, trying fallback.`, "warning");
|
||||
if (verbose) ctx.ui.notify(`Model ${modelId} not found, trying fallback.`, "info");
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
@ -2157,25 +2380,14 @@ async function dispatchNextUnit(
|
|||
} else {
|
||||
const nextModel = modelsToTry[modelsToTry.indexOf(modelId) + 1];
|
||||
if (nextModel) {
|
||||
ctx.ui.notify(
|
||||
`Failed to set model ${modelId}, trying fallback ${nextModel}...`,
|
||||
"warning",
|
||||
);
|
||||
if (verbose) ctx.ui.notify(`Failed to set model ${modelId}, trying ${nextModel}...`, "info");
|
||||
} else {
|
||||
ctx.ui.notify(
|
||||
`Failed to set model ${modelId} and all fallbacks exhausted. Using default model.`,
|
||||
"warning",
|
||||
);
|
||||
ctx.ui.notify(`All preferred models unavailable for ${unitType}. Using default.`, "warning");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!modelSet) {
|
||||
ctx.ui.notify(
|
||||
`Could not set any preferred model for ${unitType}. Continuing with default.`,
|
||||
"warning",
|
||||
);
|
||||
}
|
||||
// modelSet=false is already handled by the "all fallbacks exhausted" warning above
|
||||
}
|
||||
|
||||
// Start progress-aware supervision: a soft warning, an idle watchdog, and
|
||||
|
|
@ -2288,6 +2500,9 @@ async function dispatchNextUnit(
|
|||
);
|
||||
await pauseAuto(ctx, pi);
|
||||
}
|
||||
} finally {
|
||||
_dispatching = false;
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Skill Discovery ──────────────────────────────────────────────────────────
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ import { fileURLToPath } from "node:url";
|
|||
import { deriveState } from "./state.js";
|
||||
import { GSDDashboardOverlay } from "./dashboard-overlay.js";
|
||||
import { showQueue, showDiscuss } from "./guided-flow.js";
|
||||
import { startAuto, stopAuto, isAutoActive, isAutoPaused, isStepMode } from "./auto.js";
|
||||
import { startAuto, stopAuto, pauseAuto, isAutoActive, isAutoPaused, isStepMode } from "./auto.js";
|
||||
import {
|
||||
getGlobalGSDPreferencesPath,
|
||||
getLegacyGlobalGSDPreferencesPath,
|
||||
|
|
@ -33,6 +33,9 @@ import {
|
|||
import { loadPrompt } from "./prompt-loader.js";
|
||||
import { handleMigrate } from "./migrate/command.js";
|
||||
import { handleRemote } from "../remote-questions/remote-command.js";
|
||||
import { handleHistory } from "./history.js";
|
||||
import { handleUndo } from "./undo.js";
|
||||
import { handleExport } from "./export.js";
|
||||
|
||||
function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined, reportText: string, structuredIssues: string): void {
|
||||
const workflowPath = process.env.GSD_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".pi", "GSD-WORKFLOW.md");
|
||||
|
|
@ -54,10 +57,13 @@ function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined, reportT
|
|||
|
||||
export function registerGSDCommand(pi: ExtensionAPI): void {
|
||||
pi.registerCommand("gsd", {
|
||||
description: "GSD — Get Shit Done: /gsd next|auto|stop|status|queue|prefs|config|hooks|doctor|migrate|remote",
|
||||
|
||||
description: "GSD — Get Shit Done: /gsd next|auto|stop|pause|status|queue|history|undo|skip|export|cleanup|prefs|config|hooks|doctor|migrate|remote",
|
||||
getArgumentCompletions: (prefix: string) => {
|
||||
const subcommands = ["next", "auto", "stop", "status", "queue", "discuss", "prefs", "config", "hooks", "doctor", "migrate", "remote"];
|
||||
const subcommands = [
|
||||
"next", "auto", "stop", "pause", "status", "queue", "discuss",
|
||||
"history", "undo", "skip", "export", "cleanup", "prefs",
|
||||
"config", "hooks", "doctor", "migrate", "remote",
|
||||
];
|
||||
const parts = prefix.trim().split(/\s+/);
|
||||
|
||||
if (parts.length <= 1) {
|
||||
|
|
@ -87,6 +93,38 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
|
|||
.map((cmd) => ({ value: `remote ${cmd}`, label: cmd }));
|
||||
}
|
||||
|
||||
if (parts[0] === "next" && parts.length <= 2) {
|
||||
const flagPrefix = parts[1] ?? "";
|
||||
return ["--verbose", "--dry-run"]
|
||||
.filter((f) => f.startsWith(flagPrefix))
|
||||
.map((f) => ({ value: `next ${f}`, label: f }));
|
||||
}
|
||||
|
||||
if (parts[0] === "history" && parts.length <= 2) {
|
||||
const flagPrefix = parts[1] ?? "";
|
||||
return ["--cost", "--phase", "--model", "10", "20", "50"]
|
||||
.filter((f) => f.startsWith(flagPrefix))
|
||||
.map((f) => ({ value: `history ${f}`, label: f }));
|
||||
}
|
||||
|
||||
if (parts[0] === "undo" && parts.length <= 2) {
|
||||
return [{ value: "undo --force", label: "--force" }];
|
||||
}
|
||||
|
||||
if (parts[0] === "export" && parts.length <= 2) {
|
||||
const flagPrefix = parts[1] ?? "";
|
||||
return ["--json", "--markdown"]
|
||||
.filter((f) => f.startsWith(flagPrefix))
|
||||
.map((f) => ({ value: `export ${f}`, label: f }));
|
||||
}
|
||||
|
||||
if (parts[0] === "cleanup" && parts.length <= 2) {
|
||||
const subPrefix = parts[1] ?? "";
|
||||
return ["branches", "snapshots"]
|
||||
.filter((cmd) => cmd.startsWith(subPrefix))
|
||||
.map((cmd) => ({ value: `cleanup ${cmd}`, label: cmd }));
|
||||
}
|
||||
|
||||
if (parts[0] === "doctor") {
|
||||
const modePrefix = parts[1] ?? "";
|
||||
const modes = ["fix", "heal", "audit"];
|
||||
|
|
@ -122,6 +160,10 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
|
|||
}
|
||||
|
||||
if (trimmed === "next" || trimmed.startsWith("next ")) {
|
||||
if (trimmed.includes("--dry-run")) {
|
||||
await handleDryRun(ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
const verboseMode = trimmed.includes("--verbose");
|
||||
await startAuto(ctx, pi, process.cwd(), verboseMode, { step: true });
|
||||
return;
|
||||
|
|
@ -142,6 +184,49 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
|
|||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "pause") {
|
||||
if (!isAutoActive()) {
|
||||
if (isAutoPaused()) {
|
||||
ctx.ui.notify("Auto-mode is already paused. /gsd auto to resume.", "info");
|
||||
} else {
|
||||
ctx.ui.notify("Auto-mode is not running.", "info");
|
||||
}
|
||||
return;
|
||||
}
|
||||
await pauseAuto(ctx, pi);
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "history" || trimmed.startsWith("history ")) {
|
||||
await handleHistory(trimmed.replace(/^history\s*/, "").trim(), ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "undo" || trimmed.startsWith("undo ")) {
|
||||
await handleUndo(trimmed.replace(/^undo\s*/, "").trim(), ctx, pi, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed.startsWith("skip ")) {
|
||||
await handleSkip(trimmed.replace(/^skip\s*/, "").trim(), ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "export" || trimmed.startsWith("export ")) {
|
||||
await handleExport(trimmed.replace(/^export\s*/, "").trim(), ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "cleanup branches") {
|
||||
await handleCleanupBranches(ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "cleanup snapshots") {
|
||||
await handleCleanupSnapshots(ctx, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "queue") {
|
||||
await showQueue(ctx, pi, process.cwd());
|
||||
return;
|
||||
|
|
@ -180,7 +265,7 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
|
|||
}
|
||||
|
||||
ctx.ui.notify(
|
||||
`Unknown: /gsd ${trimmed}. Use /gsd, /gsd next, /gsd auto, /gsd stop, /gsd status, /gsd queue, /gsd discuss, /gsd prefs, /gsd config, /gsd hooks, /gsd doctor [audit|fix|heal] [M###/S##], /gsd migrate <path>, or /gsd remote [slack|discord|status|disconnect].`,
|
||||
`Unknown: /gsd ${trimmed}. Use /gsd next|auto|stop|pause|status|queue|discuss|history|undo|skip <unit>|export|cleanup|prefs|config|hooks|doctor|migrate|remote.`,
|
||||
"warning",
|
||||
);
|
||||
},
|
||||
|
|
@ -626,3 +711,221 @@ async function ensurePreferencesFile(
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
// ─── Skip handler ─────────────────────────────────────────────────────────────
|
||||
|
||||
async function handleSkip(unitArg: string, ctx: ExtensionCommandContext, basePath: string): Promise<void> {
|
||||
if (!unitArg) {
|
||||
ctx.ui.notify("Usage: /gsd skip <unit-id> (e.g., /gsd skip execute-task/M001/S01/T03 or /gsd skip T03)", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
const { existsSync: fileExists, writeFileSync: writeFile, mkdirSync: mkDir, readFileSync: readFile } = await import("node:fs");
|
||||
const { join: pathJoin } = await import("node:path");
|
||||
|
||||
const completedKeysFile = pathJoin(basePath, ".gsd", "completed-units.json");
|
||||
let keys: string[] = [];
|
||||
try {
|
||||
if (fileExists(completedKeysFile)) {
|
||||
keys = JSON.parse(readFile(completedKeysFile, "utf-8"));
|
||||
}
|
||||
} catch { /* start fresh */ }
|
||||
|
||||
// Normalize: accept "execute-task/M001/S01/T03", "M001/S01/T03", or just "T03"
|
||||
let skipKey = unitArg;
|
||||
|
||||
if (!skipKey.includes("execute-task") && !skipKey.includes("plan-") && !skipKey.includes("research-") && !skipKey.includes("complete-")) {
|
||||
const state = await deriveState(basePath);
|
||||
const mid = state.activeMilestone?.id;
|
||||
const sid = state.activeSlice?.id;
|
||||
|
||||
if (unitArg.match(/^T\d+$/i) && mid && sid) {
|
||||
skipKey = `execute-task/${mid}/${sid}/${unitArg.toUpperCase()}`;
|
||||
} else if (unitArg.match(/^S\d+$/i) && mid) {
|
||||
skipKey = `plan-slice/${mid}/${unitArg.toUpperCase()}`;
|
||||
} else if (unitArg.includes("/")) {
|
||||
skipKey = `execute-task/${unitArg}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (keys.includes(skipKey)) {
|
||||
ctx.ui.notify(`Already skipped: ${skipKey}`, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
keys.push(skipKey);
|
||||
mkDir(pathJoin(basePath, ".gsd"), { recursive: true });
|
||||
writeFile(completedKeysFile, JSON.stringify(keys), "utf-8");
|
||||
|
||||
ctx.ui.notify(`Skipped: ${skipKey}. Will not be dispatched in auto-mode.`, "success");
|
||||
}
|
||||
|
||||
// ─── Dry-run handler ──────────────────────────────────────────────────────────
|
||||
|
||||
async function handleDryRun(ctx: ExtensionCommandContext, basePath: string): Promise<void> {
|
||||
const state = await deriveState(basePath);
|
||||
|
||||
if (!state.activeMilestone) {
|
||||
ctx.ui.notify("No active milestone — nothing to dispatch.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
const { getLedger, getProjectTotals, formatCost, formatTokenCount, loadLedgerFromDisk } = await import("./metrics.js");
|
||||
const { loadEffectiveGSDPreferences: loadPrefs } = await import("./preferences.js");
|
||||
const { formatDuration } = await import("./history.js");
|
||||
|
||||
const ledger = getLedger();
|
||||
const units = ledger?.units ?? loadLedgerFromDisk(basePath)?.units ?? [];
|
||||
const prefs = loadPrefs()?.preferences;
|
||||
|
||||
let nextType = "unknown";
|
||||
let nextId = "unknown";
|
||||
|
||||
const mid = state.activeMilestone.id;
|
||||
const midTitle = state.activeMilestone.title;
|
||||
|
||||
if (state.phase === "pre-planning") {
|
||||
nextType = "research-milestone";
|
||||
nextId = mid;
|
||||
} else if (state.phase === "planning" && state.activeSlice) {
|
||||
nextType = "plan-slice";
|
||||
nextId = `${mid}/${state.activeSlice.id}`;
|
||||
} else if (state.phase === "executing" && state.activeTask && state.activeSlice) {
|
||||
nextType = "execute-task";
|
||||
nextId = `${mid}/${state.activeSlice.id}/${state.activeTask.id}`;
|
||||
} else if (state.phase === "summarizing" && state.activeSlice) {
|
||||
nextType = "complete-slice";
|
||||
nextId = `${mid}/${state.activeSlice.id}`;
|
||||
} else if (state.phase === "completing-milestone") {
|
||||
nextType = "complete-milestone";
|
||||
nextId = mid;
|
||||
} else {
|
||||
nextType = state.phase;
|
||||
nextId = mid;
|
||||
}
|
||||
|
||||
const sameTypeUnits = units.filter(u => u.type === nextType);
|
||||
const avgCost = sameTypeUnits.length > 0
|
||||
? sameTypeUnits.reduce((s, u) => s + u.cost, 0) / sameTypeUnits.length
|
||||
: null;
|
||||
const avgDuration = sameTypeUnits.length > 0
|
||||
? sameTypeUnits.reduce((s, u) => s + (u.finishedAt - u.startedAt), 0) / sameTypeUnits.length
|
||||
: null;
|
||||
|
||||
const totals = units.length > 0 ? getProjectTotals(units) : null;
|
||||
const budgetRemaining = prefs?.budget_ceiling && totals
|
||||
? prefs.budget_ceiling - totals.cost
|
||||
: null;
|
||||
|
||||
const lines = [
|
||||
`Dry-run preview:`,
|
||||
``,
|
||||
` Next unit: ${nextType}`,
|
||||
` ID: ${nextId}`,
|
||||
` Milestone: ${mid}: ${midTitle}`,
|
||||
` Phase: ${state.phase}`,
|
||||
` Est. cost: ${avgCost !== null ? `${formatCost(avgCost)} (avg of ${sameTypeUnits.length} similar)` : "unknown (first of this type)"}`,
|
||||
` Est. duration: ${avgDuration !== null ? formatDuration(avgDuration) : "unknown"}`,
|
||||
` Spent so far: ${totals ? formatCost(totals.cost) : "$0"}`,
|
||||
` Budget left: ${budgetRemaining !== null ? formatCost(budgetRemaining) : "no ceiling set"}`,
|
||||
];
|
||||
|
||||
if (state.progress) {
|
||||
const p = state.progress;
|
||||
lines.push(` Progress: ${p.tasks?.done ?? 0}/${p.tasks?.total ?? "?"} tasks, ${p.slices?.done ?? 0}/${p.slices?.total ?? "?"} slices`);
|
||||
}
|
||||
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
// ─── Branch cleanup handler ──────────────────────────────────────────────────
|
||||
|
||||
async function handleCleanupBranches(ctx: ExtensionCommandContext, basePath: string): Promise<void> {
|
||||
const { execFileSync } = await import("node:child_process");
|
||||
|
||||
let branches: string[];
|
||||
try {
|
||||
const output = execFileSync("git", ["branch", "--list", "gsd/*"], { cwd: basePath, timeout: 10000, encoding: "utf-8" });
|
||||
branches = output.split("\n").map(b => b.trim().replace(/^\* /, "")).filter(Boolean);
|
||||
} catch {
|
||||
ctx.ui.notify("No GSD branches found.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
if (branches.length === 0) {
|
||||
ctx.ui.notify("No GSD branches to clean up.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
let mainBranch: string;
|
||||
try {
|
||||
mainBranch = execFileSync("git", ["symbolic-ref", "refs/remotes/origin/HEAD", "--short"], { cwd: basePath, timeout: 5000, encoding: "utf-8", stdio: ["ignore", "pipe", "ignore"] })
|
||||
.trim().replace("origin/", "");
|
||||
} catch {
|
||||
mainBranch = "main";
|
||||
}
|
||||
|
||||
let merged: string[];
|
||||
try {
|
||||
const output = execFileSync("git", ["branch", "--merged", mainBranch, "--list", "gsd/*"], { cwd: basePath, timeout: 10000, encoding: "utf-8" });
|
||||
merged = output.split("\n").map(b => b.trim()).filter(Boolean);
|
||||
} catch {
|
||||
merged = [];
|
||||
}
|
||||
|
||||
if (merged.length === 0) {
|
||||
ctx.ui.notify(`${branches.length} GSD branches found, none are merged into ${mainBranch} yet.`, "info");
|
||||
return;
|
||||
}
|
||||
|
||||
let deleted = 0;
|
||||
for (const branch of merged) {
|
||||
try {
|
||||
execFileSync("git", ["branch", "-d", branch], { cwd: basePath, timeout: 5000, stdio: "ignore" });
|
||||
deleted++;
|
||||
} catch { /* skip branches that can't be deleted */ }
|
||||
}
|
||||
|
||||
ctx.ui.notify(`Cleaned up ${deleted} merged branches. ${branches.length - deleted} remain.`, "success");
|
||||
}
|
||||
|
||||
// ─── Snapshot cleanup handler ─────────────────────────────────────────────────
|
||||
|
||||
async function handleCleanupSnapshots(ctx: ExtensionCommandContext, basePath: string): Promise<void> {
|
||||
const { execFileSync } = await import("node:child_process");
|
||||
|
||||
let refs: string[];
|
||||
try {
|
||||
const output = execFileSync("git", ["for-each-ref", "refs/gsd/snapshots/", "--format=%(refname)"], { cwd: basePath, timeout: 10000, encoding: "utf-8" });
|
||||
refs = output.split("\n").filter(Boolean);
|
||||
} catch {
|
||||
ctx.ui.notify("No snapshot refs found.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
if (refs.length === 0) {
|
||||
ctx.ui.notify("No snapshot refs to clean up.", "info");
|
||||
return;
|
||||
}
|
||||
|
||||
const byLabel = new Map<string, string[]>();
|
||||
for (const ref of refs) {
|
||||
const parts = ref.split("/");
|
||||
const label = parts.slice(0, -1).join("/");
|
||||
if (!byLabel.has(label)) byLabel.set(label, []);
|
||||
byLabel.get(label)!.push(ref);
|
||||
}
|
||||
|
||||
let pruned = 0;
|
||||
for (const [, labelRefs] of byLabel) {
|
||||
const sorted = labelRefs.sort();
|
||||
for (const old of sorted.slice(0, -5)) {
|
||||
try {
|
||||
execFileSync("git", ["update-ref", "-d", old], { cwd: basePath, timeout: 5000, stdio: "ignore" });
|
||||
pruned++;
|
||||
} catch { /* skip */ }
|
||||
}
|
||||
}
|
||||
|
||||
ctx.ui.notify(`Pruned ${pruned} old snapshot refs. ${refs.length - pruned} remain.`, "success");
|
||||
}
|
||||
|
|
|
|||
100
src/resources/extensions/gsd/export.ts
Normal file
100
src/resources/extensions/gsd/export.ts
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
// GSD Extension — Session/Milestone Export
|
||||
// Generate shareable reports of milestone work in JSON or markdown format.
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
import type { ExtensionCommandContext } from "@gsd/pi-coding-agent";
|
||||
import { writeFileSync, mkdirSync } from "node:fs";
|
||||
import { join, basename } from "node:path";
|
||||
import {
|
||||
getLedger, getProjectTotals, aggregateByPhase, aggregateBySlice,
|
||||
aggregateByModel, formatCost, formatTokenCount,
|
||||
} from "./metrics.js";
|
||||
import type { UnitMetrics } from "./metrics.js";
|
||||
import { gsdRoot } from "./paths.js";
|
||||
import { formatDuration } from "./history.js";
|
||||
|
||||
/**
 * Export session/milestone data to JSON or markdown.
 *
 * Reads unit metrics from the in-memory ledger, falling back to the on-disk
 * ledger when auto-mode has not run in this process, and writes a timestamped
 * report file into the project's .gsd directory.
 *
 * @param args     raw argument text; "--json" selects JSON output, anything else markdown
 * @param ctx      extension UI context used for notifications
 * @param basePath project root containing the .gsd directory
 */
export async function handleExport(args: string, ctx: ExtensionCommandContext, basePath: string): Promise<void> {
  const format = args.includes("--json") ? "json" : "markdown";

  const ledger = getLedger();
  let units: UnitMetrics[];

  // Prefer live metrics; fall back to the persisted ledger when absent or empty.
  if (ledger && ledger.units.length > 0) {
    units = ledger.units;
  } else {
    const { loadLedgerFromDisk } = await import("./metrics.js");
    const diskLedger = loadLedgerFromDisk(basePath);
    if (!diskLedger || diskLedger.units.length === 0) {
      ctx.ui.notify("Nothing to export — no units executed yet.", "info");
      return;
    }
    units = diskLedger.units;
  }

  const projectName = basename(basePath);
  const exportDir = gsdRoot(basePath);
  mkdirSync(exportDir, { recursive: true });
  // Filesystem-safe ISO timestamp (":" and "." replaced), truncated to seconds.
  const timestamp = new Date().toISOString().replace(/[:.]/g, "-").slice(0, 19);

  if (format === "json") {
    // JSON report: raw units plus the same aggregates the markdown view renders.
    const report = {
      exportedAt: new Date().toISOString(),
      project: projectName,
      totals: getProjectTotals(units),
      byPhase: aggregateByPhase(units),
      bySlice: aggregateBySlice(units),
      byModel: aggregateByModel(units),
      units,
    };
    const outPath = join(exportDir, `export-${timestamp}.json`);
    writeFileSync(outPath, JSON.stringify(report, null, 2) + "\n", "utf-8");
    ctx.ui.notify(`Exported to ${outPath}`, "success");
  } else {
    const totals = getProjectTotals(units);
    const phases = aggregateByPhase(units);
    const slices = aggregateBySlice(units);

    // Markdown report: overall totals, then per-phase / per-slice / per-unit tables.
    const md = [
      `# GSD Session Report — ${projectName}`,
      ``,
      `**Generated**: ${new Date().toISOString()}`,
      `**Units completed**: ${totals.units}`,
      `**Total cost**: ${formatCost(totals.cost)}`,
      `**Total tokens**: ${formatTokenCount(totals.tokens.total)}`,
      `**Total duration**: ${formatDuration(totals.duration)}`,
      `**Tool calls**: ${totals.toolCalls}`,
      ``,
      `## Cost by Phase`,
      ``,
      `| Phase | Units | Cost | Tokens | Duration |`,
      `|-------|-------|------|--------|----------|`,
      ...phases.map(p =>
        `| ${p.phase} | ${p.units} | ${formatCost(p.cost)} | ${formatTokenCount(p.tokens.total)} | ${formatDuration(p.duration)} |`,
      ),
      ``,
      `## Cost by Slice`,
      ``,
      `| Slice | Units | Cost | Tokens | Duration |`,
      `|-------|-------|------|--------|----------|`,
      ...slices.map(s =>
        `| ${s.sliceId} | ${s.units} | ${formatCost(s.cost)} | ${formatTokenCount(s.tokens.total)} | ${formatDuration(s.duration)} |`,
      ),
      ``,
      `## Unit History`,
      ``,
      `| Type | ID | Model | Cost | Tokens | Duration |`,
      `|------|-----|-------|------|--------|----------|`,
      ...units.map(u =>
        `| ${u.type} | ${u.id} | ${u.model.replace(/^claude-/, "")} | ${formatCost(u.cost)} | ${formatTokenCount(u.tokens.total)} | ${formatDuration(u.finishedAt - u.startedAt)} |`,
      ),
      ``,
    ].join("\n");

    const outPath = join(exportDir, `export-${timestamp}.md`);
    writeFileSync(outPath, md, "utf-8");
    ctx.ui.notify(`Exported to ${outPath}`, "success");
  }
}
|
||||
|
|
@ -87,6 +87,28 @@ export function ensureGitignore(basePath: string): boolean {
|
|||
existing = readFileSync(gitignorePath, "utf-8");
|
||||
}
|
||||
|
||||
// Self-heal: remove blanket ".gsd/" lines from pre-v2.14.0 projects.
|
||||
// The blanket ignore prevented planning artifacts (.gsd/milestones/) from
|
||||
// being tracked in git, causing artifacts to vanish in worktrees and
|
||||
// triggering loop detection failures. Replace with explicit runtime-only
|
||||
// ignores so planning files are tracked naturally.
|
||||
let modified = false;
|
||||
const lines = existing.split("\n");
|
||||
const filteredLines = lines.filter(line => {
|
||||
const trimmed = line.trim();
|
||||
// Remove standalone ".gsd/" lines (blanket ignore) but keep specific
|
||||
// .gsd/ subpath patterns like ".gsd/activity/" or ".gsd/auto.lock"
|
||||
if (trimmed === ".gsd/" || trimmed === ".gsd") {
|
||||
modified = true;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (modified) {
|
||||
existing = filteredLines.join("\n");
|
||||
writeFileSync(gitignorePath, existing, "utf-8");
|
||||
}
|
||||
|
||||
// Parse existing lines (trimmed, ignoring comments and blanks)
|
||||
const existingLines = new Set(
|
||||
existing
|
||||
|
|
@ -98,7 +120,7 @@ export function ensureGitignore(basePath: string): boolean {
|
|||
// Find patterns not yet present
|
||||
const missing = BASELINE_PATTERNS.filter((p) => !existingLines.has(p));
|
||||
|
||||
if (missing.length === 0) return false;
|
||||
if (missing.length === 0) return modified;
|
||||
|
||||
// Build the block to append
|
||||
const block = [
|
||||
|
|
|
|||
162
src/resources/extensions/gsd/history.ts
Normal file
162
src/resources/extensions/gsd/history.ts
Normal file
|
|
@ -0,0 +1,162 @@
|
|||
// GSD Extension — Session History View
|
||||
// Human-readable display of past auto-mode unit executions.
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
import type { ExtensionCommandContext } from "@gsd/pi-coding-agent";
|
||||
import {
|
||||
getLedger, getProjectTotals, formatCost, formatTokenCount,
|
||||
aggregateBySlice, aggregateByPhase, aggregateByModel, loadLedgerFromDisk,
|
||||
} from "./metrics.js";
|
||||
import type { UnitMetrics } from "./metrics.js";
|
||||
|
||||
/**
|
||||
* Show recent unit execution history with cost, tokens, and duration.
|
||||
*/
|
||||
export async function handleHistory(args: string, ctx: ExtensionCommandContext, basePath: string): Promise<void> {
|
||||
const ledger = getLedger();
|
||||
|
||||
// If ledger is null (metrics not initialized from auto-mode), try loading from disk
|
||||
let units: UnitMetrics[];
|
||||
if (ledger && ledger.units.length > 0) {
|
||||
units = ledger.units;
|
||||
} else {
|
||||
const diskLedger = loadLedgerFromDisk(basePath);
|
||||
if (!diskLedger || diskLedger.units.length === 0) {
|
||||
ctx.ui.notify("No history — no units have been executed yet.", "info");
|
||||
return;
|
||||
}
|
||||
units = diskLedger.units;
|
||||
}
|
||||
|
||||
const parsedLimit = parseInt(args.replace(/--\w+/g, "").trim(), 10);
|
||||
const limit = Number.isFinite(parsedLimit) && parsedLimit > 0 ? parsedLimit : 20;
|
||||
const showCost = args.includes("--cost");
|
||||
const showPhase = args.includes("--phase");
|
||||
const showModel = args.includes("--model");
|
||||
|
||||
if (showCost) {
|
||||
return showCostBreakdown(units, ctx);
|
||||
}
|
||||
if (showPhase) {
|
||||
return showPhaseBreakdown(units, ctx);
|
||||
}
|
||||
if (showModel) {
|
||||
return showModelBreakdown(units, ctx);
|
||||
}
|
||||
|
||||
const display = units.slice(-limit).reverse();
|
||||
const totals = getProjectTotals(units);
|
||||
|
||||
const lines: string[] = [
|
||||
`Last ${display.length} of ${units.length} units | Total: ${formatCost(totals.cost)} · ${formatTokenCount(totals.tokens.total)} tokens`,
|
||||
"",
|
||||
padRight("Time", 14) + padRight("Type", 20) + padRight("ID", 16) + padRight("Model", 14) + padRight("Cost", 10) + padRight("Tokens", 10) + "Duration",
|
||||
"─".repeat(98),
|
||||
];
|
||||
|
||||
for (const u of display) {
|
||||
lines.push(
|
||||
padRight(formatRelativeTime(u.finishedAt), 14) +
|
||||
padRight(u.type, 20) +
|
||||
padRight(truncate(u.id, 15), 16) +
|
||||
padRight(shortModel(u.model), 14) +
|
||||
padRight(formatCost(u.cost), 10) +
|
||||
padRight(formatTokenCount(u.tokens.total), 10) +
|
||||
formatDuration(u.finishedAt - u.startedAt),
|
||||
);
|
||||
}
|
||||
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
function showCostBreakdown(units: UnitMetrics[], ctx: ExtensionCommandContext): void {
|
||||
const slices = aggregateBySlice(units);
|
||||
const lines = [
|
||||
"Cost by slice:",
|
||||
"",
|
||||
padRight("Slice", 16) + padRight("Units", 8) + padRight("Cost", 10) + "Tokens",
|
||||
"─".repeat(50),
|
||||
];
|
||||
for (const s of slices) {
|
||||
lines.push(
|
||||
padRight(s.sliceId, 16) +
|
||||
padRight(String(s.units), 8) +
|
||||
padRight(formatCost(s.cost), 10) +
|
||||
formatTokenCount(s.tokens.total),
|
||||
);
|
||||
}
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
function showPhaseBreakdown(units: UnitMetrics[], ctx: ExtensionCommandContext): void {
|
||||
const phases = aggregateByPhase(units);
|
||||
const lines = [
|
||||
"Cost by phase:",
|
||||
"",
|
||||
padRight("Phase", 16) + padRight("Units", 8) + padRight("Cost", 10) + padRight("Tokens", 10) + "Duration",
|
||||
"─".repeat(60),
|
||||
];
|
||||
for (const p of phases) {
|
||||
lines.push(
|
||||
padRight(p.phase, 16) +
|
||||
padRight(String(p.units), 8) +
|
||||
padRight(formatCost(p.cost), 10) +
|
||||
padRight(formatTokenCount(p.tokens.total), 10) +
|
||||
formatDuration(p.duration),
|
||||
);
|
||||
}
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
function showModelBreakdown(units: UnitMetrics[], ctx: ExtensionCommandContext): void {
|
||||
const models = aggregateByModel(units);
|
||||
const lines = [
|
||||
"Cost by model:",
|
||||
"",
|
||||
padRight("Model", 24) + padRight("Units", 8) + padRight("Cost", 10) + "Tokens",
|
||||
"─".repeat(56),
|
||||
];
|
||||
for (const m of models) {
|
||||
lines.push(
|
||||
padRight(shortModel(m.model), 24) +
|
||||
padRight(String(m.units), 8) +
|
||||
padRight(formatCost(m.cost), 10) +
|
||||
formatTokenCount(m.tokens.total),
|
||||
);
|
||||
}
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
// ─── Formatting helpers ──────────────────────────────────────────────────────
|
||||
|
||||
export function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`;
|
||||
const secs = Math.floor(ms / 1000);
|
||||
if (secs < 60) return `${secs}s`;
|
||||
const mins = Math.floor(secs / 60);
|
||||
const remSecs = secs % 60;
|
||||
if (mins < 60) return `${mins}m ${remSecs}s`;
|
||||
const hours = Math.floor(mins / 60);
|
||||
const remMins = mins % 60;
|
||||
return `${hours}h ${remMins}m`;
|
||||
}
|
||||
|
||||
function formatRelativeTime(timestamp: number): string {
|
||||
const diff = Date.now() - timestamp;
|
||||
if (diff < 60_000) return "just now";
|
||||
if (diff < 3_600_000) return `${Math.floor(diff / 60_000)}m ago`;
|
||||
if (diff < 86_400_000) return `${Math.floor(diff / 3_600_000)}h ago`;
|
||||
return `${Math.floor(diff / 86_400_000)}d ago`;
|
||||
}
|
||||
|
||||
function shortModel(model: string): string {
|
||||
return model.replace(/^claude-/, "").replace(/^anthropic\//, "");
|
||||
}
|
||||
|
||||
function truncate(s: string, maxLen: number): string {
|
||||
return s.length > maxLen ? s.slice(0, maxLen - 1) + "…" : s;
|
||||
}
|
||||
|
||||
function padRight(s: string, len: number): string {
|
||||
return s.length >= len ? s.slice(0, len) : s + " ".repeat(len - s.length);
|
||||
}
|
||||
|
|
@ -347,6 +347,23 @@ function metricsPath(base: string): string {
|
|||
return join(gsdRoot(base), "metrics.json");
|
||||
}
|
||||
|
||||
/**
|
||||
* Load ledger from disk without initializing in-memory state.
|
||||
* Used by history/export commands outside of auto-mode.
|
||||
*/
|
||||
export function loadLedgerFromDisk(base: string): MetricsLedger | null {
|
||||
try {
|
||||
const raw = readFileSync(metricsPath(base), "utf-8");
|
||||
const parsed = JSON.parse(raw);
|
||||
if (parsed.version === 1 && Array.isArray(parsed.units)) {
|
||||
return parsed as MetricsLedger;
|
||||
}
|
||||
} catch {
|
||||
// File doesn't exist or is corrupt
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function loadLedger(base: string): MetricsLedger {
|
||||
try {
|
||||
const raw = readFileSync(metricsPath(base), "utf-8");
|
||||
|
|
|
|||
88
src/resources/extensions/gsd/notifications.ts
Normal file
88
src/resources/extensions/gsd/notifications.ts
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
// GSD Extension — Desktop Notification Helper
|
||||
// Cross-platform desktop notifications for auto-mode events.
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
import { execFileSync } from "node:child_process";
|
||||
import type { NotificationPreferences } from "./types.js";
|
||||
import { loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
|
||||
export type NotifyLevel = "info" | "success" | "warning" | "error";
|
||||
export type NotificationKind = "complete" | "error" | "budget" | "milestone" | "attention";
|
||||
|
||||
// Platform-specific executable + argv pair for one desktop notification.
// Arguments are kept as an array (execFile-style) — no shell string is
// ever built, which sidesteps quoting/injection issues.
interface NotificationCommand {
  // Executable name resolved via PATH (e.g. "osascript", "notify-send").
  file: string;
  // Arguments passed verbatim to the executable — no shell interpolation.
  args: string[];
}
|
||||
|
||||
/**
|
||||
* Send a native desktop notification. Non-blocking, non-fatal.
|
||||
* macOS: osascript, Linux: notify-send, Windows: skipped.
|
||||
*/
|
||||
export function sendDesktopNotification(
|
||||
title: string,
|
||||
message: string,
|
||||
level: NotifyLevel = "info",
|
||||
kind: NotificationKind = "complete",
|
||||
): void {
|
||||
if (!shouldSendDesktopNotification(kind)) return;
|
||||
|
||||
try {
|
||||
const command = buildDesktopNotificationCommand(process.platform, title, message, level);
|
||||
if (!command) return;
|
||||
execFileSync(command.file, command.args, { timeout: 3000, stdio: "ignore" });
|
||||
} catch {
|
||||
// Non-fatal — desktop notifications are best-effort
|
||||
}
|
||||
}
|
||||
|
||||
export function shouldSendDesktopNotification(
|
||||
kind: NotificationKind,
|
||||
preferences: NotificationPreferences | undefined = loadEffectiveGSDPreferences()?.preferences.notifications,
|
||||
): boolean {
|
||||
if (preferences?.enabled === false) return false;
|
||||
|
||||
switch (kind) {
|
||||
case "error":
|
||||
return preferences?.on_error ?? true;
|
||||
case "budget":
|
||||
return preferences?.on_budget ?? true;
|
||||
case "milestone":
|
||||
return preferences?.on_milestone ?? true;
|
||||
case "attention":
|
||||
return preferences?.on_attention ?? true;
|
||||
case "complete":
|
||||
default:
|
||||
return preferences?.on_complete ?? true;
|
||||
}
|
||||
}
|
||||
|
||||
export function buildDesktopNotificationCommand(
|
||||
platform: NodeJS.Platform,
|
||||
title: string,
|
||||
message: string,
|
||||
level: NotifyLevel = "info",
|
||||
): NotificationCommand | null {
|
||||
const normalizedTitle = normalizeNotificationText(title);
|
||||
const normalizedMessage = normalizeNotificationText(message);
|
||||
|
||||
if (platform === "darwin") {
|
||||
const sound = level === "error" ? 'sound name "Basso"' : 'sound name "Glass"';
|
||||
const script = `display notification "${escapeAppleScript(normalizedMessage)}" with title "${escapeAppleScript(normalizedTitle)}" ${sound}`;
|
||||
return { file: "osascript", args: ["-e", script] };
|
||||
}
|
||||
|
||||
if (platform === "linux") {
|
||||
const urgency = level === "error" ? "critical" : level === "warning" ? "normal" : "low";
|
||||
return { file: "notify-send", args: ["-u", urgency, normalizedTitle, normalizedMessage] };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function normalizeNotificationText(s: string): string {
|
||||
return s.replace(/\r?\n/g, " ").trim();
|
||||
}
|
||||
|
||||
function escapeAppleScript(s: string): string {
|
||||
return s.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@ import { homedir } from "node:os";
|
|||
import { isAbsolute, join } from "node:path";
|
||||
import { getAgentDir } from "@gsd/pi-coding-agent";
|
||||
import type { GitPreferences } from "./git-service.js";
|
||||
import type { PostUnitHookConfig, PreDispatchHookConfig } from "./types.js";
|
||||
import type { PostUnitHookConfig, PreDispatchHookConfig, BudgetEnforcementMode, NotificationPreferences } from "./types.js";
|
||||
import { VALID_BRANCH_NAME } from "./git-service.js";
|
||||
|
||||
const GLOBAL_PREFERENCES_PATH = join(homedir(), ".gsd", "preferences.md");
|
||||
|
|
@ -92,6 +92,9 @@ export interface GSDPreferences {
|
|||
uat_dispatch?: boolean;
|
||||
unique_milestone_ids?: boolean;
|
||||
budget_ceiling?: number;
|
||||
budget_enforcement?: BudgetEnforcementMode;
|
||||
context_pause_threshold?: number;
|
||||
notifications?: NotificationPreferences;
|
||||
remote_questions?: RemoteQuestionsConfig;
|
||||
git?: GitPreferences;
|
||||
post_unit_hooks?: PostUnitHookConfig[];
|
||||
|
|
|
|||
|
|
@ -91,13 +91,19 @@ Do not count the reflection step as a question round. Rounds start after reflect
|
|||
|
||||
## Depth Verification
|
||||
|
||||
Before moving to the wrap-up gate, present a structured depth summary to the user via `ask_user_questions`. This is a checkpoint — show what you captured across the depth checklist dimensions, using the user's own terminology and framing.
|
||||
Before moving to the wrap-up gate, present a structured depth summary as a checkpoint.
|
||||
|
||||
The question should summarize: what you understood them to be building, what shaped your understanding most (their emphasis, constraints, concerns), and any areas where you're least confident in your understanding. Frame it as: "Before we move to planning, here's what I captured — did I get the depth right?"
|
||||
**Print the summary as normal chat text first** — this is where the formatting renders properly. Structure the summary across the depth checklist dimensions using the user's own terminology and framing. Cover: what you understood them to be building, what shaped your understanding most (their emphasis, constraints, concerns), and any areas where you're least confident in your understanding.
|
||||
|
||||
**Convention:** The question ID must contain `depth_verification` (e.g., `depth_verification_summary`). This naming convention enables downstream mechanical detection of this step.
|
||||
**Then** use `ask_user_questions` with a short confirmation question — NOT the summary itself. The question field is designed for single sentences, not multi-paragraph summaries.
|
||||
|
||||
Offer two options: "Yes, you got it (Recommended)" and "Not quite — let me clarify." If they clarify, absorb the correction and re-verify.
|
||||
**Convention:** The question ID must contain `depth_verification` (e.g., `depth_verification_confirm`). This naming convention enables downstream mechanical detection of this step.
|
||||
|
||||
Example flow:
|
||||
1. Print in chat: the full depth summary with markdown formatting (headers, bold, bullets)
|
||||
2. Call `ask_user_questions` with: header "Depth Check", question "Did I capture the depth right?", options "Yes, you got it (Recommended)" and "Not quite — let me clarify"
|
||||
|
||||
If they clarify, absorb the correction and re-verify.
|
||||
|
||||
## Wrap-up Gate
|
||||
|
||||
|
|
@ -215,6 +221,20 @@ Once the user confirms the milestone split:
|
|||
5. Write a full `CONTEXT.md` for the primary milestone (the one discussed in depth).
|
||||
6. Write a `ROADMAP.md` for **only the primary milestone** — detail-planning later milestones now is waste because the codebase will change. Include requirement coverage and a milestone definition of done.
|
||||
|
||||
#### MANDATORY: depends_on Frontmatter in CONTEXT.md
|
||||
|
||||
Every CONTEXT.md for a milestone that depends on other milestones MUST have YAML frontmatter with `depends_on`. The auto-mode state machine reads this field to determine execution order — without it, milestones may execute out of order or in parallel when they shouldn't.
|
||||
|
||||
```yaml
|
||||
---
|
||||
depends_on: [M001, M002]
|
||||
---
|
||||
|
||||
# M003: Title
|
||||
```
|
||||
|
||||
If a milestone has no dependencies, omit the frontmatter. The dependency chain from the milestone confirmation gate MUST be reflected in each CONTEXT.md frontmatter. Do NOT rely on QUEUE.md or PROJECT.md for dependency tracking — the state machine only reads CONTEXT.md frontmatter.
|
||||
|
||||
#### Phase 3: Sequential readiness gate for remaining milestones
|
||||
|
||||
For each remaining milestone **one at a time, in sequence**, use `ask_user_questions` to assess readiness. Present three options:
|
||||
|
|
|
|||
|
|
@ -82,7 +82,13 @@ Determine where the new milestones should go in the overall sequence. Consider d
|
|||
Once the user is satisfied, in a single pass for **each** new milestone (starting from {{nextId}}):
|
||||
|
||||
1. `mkdir -p .gsd/milestones/<ID>/slices`
|
||||
2. Write `.gsd/milestones/<ID>/<ID>-CONTEXT.md` — use the **Context** output template below. Capture intent, scope, risks, constraints, integration points, and relevant requirements. Mark the status as "Queued — pending auto-mode execution."
|
||||
2. Write `.gsd/milestones/<ID>/<ID>-CONTEXT.md` — use the **Context** output template below. Capture intent, scope, risks, constraints, integration points, and relevant requirements. Mark the status as "Queued — pending auto-mode execution." **If this milestone depends on other milestones, add YAML frontmatter with `depends_on`:**
|
||||
```yaml
|
||||
---
|
||||
depends_on: [M001, M002]
|
||||
---
|
||||
```
|
||||
The auto-mode state machine reads this field to enforce execution order. Without it, milestones may execute out of order. List the exact milestone IDs (including any suffix like `-0zjrg0`) from the dependency chain discussed with the user.
|
||||
|
||||
Then, after all milestone directories and context files are written:
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import {
|
||||
getBudgetAlertLevel,
|
||||
getBudgetEnforcementAction,
|
||||
getNewBudgetAlertLevel,
|
||||
} from "../auto.js";
|
||||
|
||||
// Alert buckets: 0 below the first threshold, then 75 / 90 / 100 as the
// spend ratio crosses each boundary (ratio is spend/ceiling, so 1.00 = 100%).
test("getBudgetAlertLevel returns the expected threshold bucket", () => {
  assert.equal(getBudgetAlertLevel(0.10), 0);
  assert.equal(getBudgetAlertLevel(0.75), 75);
  assert.equal(getBudgetAlertLevel(0.89), 75);
  assert.equal(getBudgetAlertLevel(0.90), 90);
  assert.equal(getBudgetAlertLevel(1.00), 100);
});

// Crossing into a new bucket fires exactly once (returns the new level);
// staying inside the already-announced bucket returns null.
test("getNewBudgetAlertLevel only emits once per threshold", () => {
  assert.equal(getNewBudgetAlertLevel(0, 0.74), null);
  assert.equal(getNewBudgetAlertLevel(0, 0.75), 75);
  assert.equal(getNewBudgetAlertLevel(75, 0.80), null);
  assert.equal(getNewBudgetAlertLevel(75, 0.90), 90);
  assert.equal(getNewBudgetAlertLevel(90, 0.95), null);
  assert.equal(getNewBudgetAlertLevel(90, 1.0), 100);
  assert.equal(getNewBudgetAlertLevel(100, 1.2), null);
});

// At/over the ceiling the configured mode maps 1:1 to the action;
// below the ceiling no action is taken regardless of mode.
test("getBudgetEnforcementAction maps the configured ceiling behavior", () => {
  assert.equal(getBudgetEnforcementAction("warn", 0.99), "none");
  assert.equal(getBudgetEnforcementAction("warn", 1.0), "warn");
  assert.equal(getBudgetEnforcementAction("pause", 1.0), "pause");
  assert.equal(getBudgetEnforcementAction("halt", 1.0), "halt");
});
|
||||
67
src/resources/extensions/gsd/tests/notifications.test.ts
Normal file
67
src/resources/extensions/gsd/tests/notifications.test.ts
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import {
|
||||
buildDesktopNotificationCommand,
|
||||
shouldSendDesktopNotification,
|
||||
} from "../notifications.js";
|
||||
import type { NotificationPreferences } from "../types.js";
|
||||
|
||||
// Each per-category flag independently gates its own notification kind.
test("shouldSendDesktopNotification honors granular preferences", () => {
  const prefs: NotificationPreferences = {
    enabled: true,
    on_complete: false,
    on_error: true,
    on_budget: false,
    on_milestone: true,
    on_attention: false,
  };

  assert.equal(shouldSendDesktopNotification("complete", prefs), false);
  assert.equal(shouldSendDesktopNotification("error", prefs), true);
  assert.equal(shouldSendDesktopNotification("budget", prefs), false);
  assert.equal(shouldSendDesktopNotification("milestone", prefs), true);
  assert.equal(shouldSendDesktopNotification("attention", prefs), false);
});

// enabled:false is a master switch — it overrides per-category opt-ins.
test("shouldSendDesktopNotification disables all categories when notifications are disabled", () => {
  const prefs: NotificationPreferences = { enabled: false, on_error: true, on_milestone: true };

  assert.equal(shouldSendDesktopNotification("error", prefs), false);
  assert.equal(shouldSendDesktopNotification("milestone", prefs), false);
});

// macOS must use an execFile-style argv (no shell string), with AppleScript
// quote/backslash escaping applied and newlines flattened to spaces.
test("buildDesktopNotificationCommand uses argument arrays for macOS notifications", () => {
  const command = buildDesktopNotificationCommand(
    "darwin",
    `Bob's "Milestone"`,
    `Budget!\nPath: C:\\temp`,
    "error",
  );

  assert.ok(command);
  assert.equal(command.file, "osascript");
  assert.deepEqual(command.args.slice(0, 1), ["-e"]);
  assert.match(command.args[1], /Bob's \\"Milestone\\"/);
  assert.match(command.args[1], /Budget! Path: C:\\\\temp/);
  assert.doesNotMatch(command.args[1], /\n/);
});

// Linux passes text straight through notify-send argv — shell
// metacharacters like $PATH and ! stay literal (no shell involved).
test("buildDesktopNotificationCommand preserves literal shell characters on linux", () => {
  const command = buildDesktopNotificationCommand(
    "linux",
    `Bob's $PATH !`,
    "line 1\nline 2",
    "warning",
  );

  assert.ok(command);
  assert.deepEqual(command, {
    file: "notify-send",
    args: ["-u", "normal", `Bob's $PATH !`, "line 1 line 2"],
  });
});

// Platforms without a supported notifier (e.g. Windows) yield null.
test("buildDesktopNotificationCommand skips unsupported platforms", () => {
  assert.equal(buildDesktopNotificationCommand("win32", "Title", "Message"), null);
});
|
||||
136
src/resources/extensions/gsd/tests/undo.test.ts
Normal file
136
src/resources/extensions/gsd/tests/undo.test.ts
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdtempSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import {
|
||||
extractCommitShas,
|
||||
findCommitsForUnit,
|
||||
handleUndo,
|
||||
uncheckTaskInPlan,
|
||||
} from "../undo.js";
|
||||
|
||||
function makeTempDir(prefix: string): string {
|
||||
return mkdtempSync(join(tmpdir(), `${prefix}-`));
|
||||
}
|
||||
|
||||
// Without --force, handleUndo must be a dry run: one warning notification
// and the completed-units ledger left untouched.
test("handleUndo without --force only warns and leaves completed units intact", async () => {
  const base = makeTempDir("gsd-undo-confirm");
  try {
    mkdirSync(join(base, ".gsd"), { recursive: true });
    writeFileSync(
      join(base, ".gsd", "completed-units.json"),
      JSON.stringify(["execute-task/M001/S01/T01"]),
      "utf-8",
    );

    // Stub context that records notifications instead of rendering them.
    const notifications: Array<{ message: string; level: string }> = [];
    const ctx = {
      ui: {
        notify(message: string, level: string) {
          notifications.push({ message, level });
        },
      },
    };

    await handleUndo("", ctx as any, {} as any, base);

    assert.equal(notifications.length, 1);
    assert.equal(notifications[0]?.level, "warning");
    assert.match(notifications[0]?.message ?? "", /Run \/gsd undo --force to confirm\./);
    // Ledger unchanged — the unit is still recorded as completed.
    assert.deepEqual(
      JSON.parse(readFileSync(join(base, ".gsd", "completed-units.json"), "utf-8")),
      ["execute-task/M001/S01/T01"],
    );
  } finally {
    rmSync(base, { recursive: true, force: true });
  }
});

// The PLAN rewrite only touches the targeted task's checkbox.
test("uncheckTaskInPlan flips a checked task back to unchecked", () => {
  const base = makeTempDir("gsd-undo-plan");
  try {
    const sliceDir = join(base, ".gsd", "milestones", "M001", "slices", "S01");
    mkdirSync(sliceDir, { recursive: true });
    const planFile = join(sliceDir, "S01-PLAN.md");
    writeFileSync(
      planFile,
      [
        "# Slice Plan",
        "",
        "- [x] **T01**: Ship the feature",
        "- [ ] **T02**: Follow-up",
      ].join("\n"),
      "utf-8",
    );

    assert.equal(uncheckTaskInPlan(base, "M001", "S01", "T01"), true);
    assert.match(readFileSync(planFile, "utf-8"), /- \[ \] \*\*T01\*\*: Ship the feature/);
  } finally {
    rmSync(base, { recursive: true, force: true });
  }
});

// Two activity logs for the same unit: only the lexicographically newest
// is parsed (2026-03-15 beats 2026-03-14), duplicate SHAs collapse, and a
// malformed trailing JSONL line is skipped without error.
test("findCommitsForUnit reads the newest matching activity log and dedupes SHAs", () => {
  const base = makeTempDir("gsd-undo-activity");
  try {
    const activityDir = join(base, ".gsd", "activity");
    mkdirSync(activityDir, { recursive: true });

    writeFileSync(
      join(activityDir, "2026-03-14-execute-task-M001-S01-T01.jsonl"),
      `${JSON.stringify({
        message: {
          content: [
            { type: "tool_result", content: "[main abc1234] old commit" },
          ],
        },
      })}\n`,
      "utf-8",
    );

    writeFileSync(
      join(activityDir, "2026-03-15-execute-task-M001-S01-T01.jsonl"),
      [
        JSON.stringify({
          message: {
            content: [
              { type: "tool_result", content: "[main deadbee] new commit\n[main cafe123] another commit" },
              { type: "tool_result", content: "[main deadbee] duplicate commit" },
            ],
          },
        }),
        "{not-json}",
      ].join("\n"),
      "utf-8",
    );

    assert.deepEqual(
      findCommitsForUnit(activityDir, "execute-task", "M001/S01/T01"),
      ["deadbee", "cafe123"],
    );
  } finally {
    rmSync(base, { recursive: true, force: true });
  }
});

// Basic extraction + dedupe from `git commit` porcelain-style lines.
test("extractCommitShas returns unique commit hashes from git output blocks", () => {
  const content = [
    "[main abc1234] first commit",
    "[feature deadbeef] second commit",
    "[main abc1234] duplicate commit",
  ].join("\n");

  assert.deepEqual(extractCommitShas(content), ["abc1234", "deadbeef"]);
});

// Tokens with shell metacharacters or non-hex text must not match — these
// SHAs are later passed to `git revert`, so strictness matters.
test("extractCommitShas ignores malformed commit tokens", () => {
  const content = [
    "[main abc1234; touch /tmp/pwned] not a real sha token",
    "[main not-a-sha] ignored",
    "[main 1234567] valid",
  ].join("\n");

  assert.deepEqual(extractCommitShas(content), ["1234567"]);
});
|
||||
|
|
@ -234,6 +234,19 @@ export interface HookDispatchResult {
|
|||
unitId: string;
|
||||
}
|
||||
|
||||
// ─── Budget & Notification Types ──────────────────────────────────────────
|
||||
|
||||
/** What auto-mode does when the budget ceiling is reached: warn, pause, or halt. */
export type BudgetEnforcementMode = 'warn' | 'pause' | 'halt';

/**
 * Per-category desktop-notification switches. All flags are optional;
 * an omitted flag means the category is enabled.
 */
export interface NotificationPreferences {
  enabled?: boolean; // default true — master switch for all categories
  on_complete?: boolean; // notify on each unit completion
  on_error?: boolean; // notify on errors
  on_budget?: boolean; // notify on budget thresholds
  on_milestone?: boolean; // notify when milestone finishes
  on_attention?: boolean; // notify when manual attention needed
}
|
||||
|
||||
// ─── Pre-Dispatch Hook Types ──────────────────────────────────────────────
|
||||
|
||||
export interface PreDispatchHookConfig {
|
||||
|
|
|
|||
219
src/resources/extensions/gsd/undo.ts
Normal file
219
src/resources/extensions/gsd/undo.ts
Normal file
|
|
@ -0,0 +1,219 @@
|
|||
// GSD Extension — Undo Last Unit
|
||||
// Rollback the most recent completed unit: revert git, remove state, uncheck plans.
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
import type { ExtensionCommandContext, ExtensionAPI } from "@gsd/pi-coding-agent";
|
||||
import { existsSync, readFileSync, writeFileSync, unlinkSync, readdirSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { deriveState, invalidateStateCache } from "./state.js";
|
||||
import { gsdRoot, resolveTasksDir, resolveSlicePath, buildTaskFileName } from "./paths.js";
|
||||
import { sendDesktopNotification } from "./notifications.js";
|
||||
|
||||
/**
 * Undo the last completed unit: revert git commits, remove from completed-units,
 * delete summary artifacts, and uncheck the task in PLAN.
 *
 * Without `--force` in `args` this is a dry run: it only prints what would
 * happen (as a warning) and changes nothing. Unit keys have the shape
 * "<unitType>/<unitId>", e.g. "execute-task/M001/S01/T01".
 */
export async function handleUndo(args: string, ctx: ExtensionCommandContext, _pi: ExtensionAPI, basePath: string): Promise<void> {
  const force = args.includes("--force");

  // 1. Load completed-units.json
  const completedKeysFile = join(gsdRoot(basePath), "completed-units.json");
  if (!existsSync(completedKeysFile)) {
    ctx.ui.notify("Nothing to undo — no completed units found.", "info");
    return;
  }

  let keys: string[];
  try {
    keys = JSON.parse(readFileSync(completedKeysFile, "utf-8"));
  } catch {
    ctx.ui.notify("Nothing to undo — completed-units.json is corrupt.", "warning");
    return;
  }

  if (keys.length === 0) {
    ctx.ui.notify("Nothing to undo — no completed units.", "info");
    return;
  }

  // Get the last completed unit
  // Split on the FIRST "/" only: everything before is the unit type,
  // everything after (which may itself contain "/") is the unit id.
  const lastKey = keys[keys.length - 1];
  const sepIdx = lastKey.indexOf("/");
  const unitType = sepIdx >= 0 ? lastKey.slice(0, sepIdx) : lastKey;
  const unitId = sepIdx >= 0 ? lastKey.slice(sepIdx + 1) : lastKey;

  // Dry run: describe the effects and bail out without touching anything.
  if (!force) {
    ctx.ui.notify(
      `Will undo: ${unitType} (${unitId})\n` +
      `This will:\n` +
      ` - Remove from completed-units.json\n` +
      ` - Delete summary artifacts\n` +
      ` - Uncheck task in PLAN (if execute-task)\n` +
      ` - Attempt to revert associated git commits\n\n` +
      `Run /gsd undo --force to confirm.`,
      "warning",
    );
    return;
  }

  // 2. Remove from completed-units.json
  // NOTE(review): filter removes EVERY occurrence of lastKey, not just the
  // last entry — presumably keys are unique; confirm against the writer.
  keys = keys.filter(k => k !== lastKey);
  writeFileSync(completedKeysFile, JSON.stringify(keys), "utf-8");

  // 3. Delete summary artifact
  // The unit id's path depth distinguishes task-level from slice-level units.
  const parts = unitId.split("/");
  let summaryRemoved = false;
  if (parts.length === 3) {
    // Task-level: M001/S01/T01
    const [mid, sid, tid] = parts;
    const tasksDir = resolveTasksDir(basePath, mid, sid);
    if (tasksDir) {
      const summaryFile = join(tasksDir, buildTaskFileName(tid, "SUMMARY"));
      if (existsSync(summaryFile)) {
        unlinkSync(summaryFile);
        summaryRemoved = true;
      }
    }
  } else if (parts.length === 2) {
    // Slice-level: M001/S01
    const [mid, sid] = parts;
    const slicePath = resolveSlicePath(basePath, mid, sid);
    if (slicePath) {
      // Try common summary filenames
      for (const suffix of ["SUMMARY", "COMPLETE"]) {
        const candidates = findFileWithPrefix(slicePath, sid, suffix);
        for (const f of candidates) {
          unlinkSync(f);
          summaryRemoved = true;
        }
      }
    }
  }

  // 4. Uncheck task in PLAN if execute-task
  let planUpdated = false;
  if (unitType === "execute-task" && parts.length === 3) {
    const [mid, sid, tid] = parts;
    planUpdated = uncheckTaskInPlan(basePath, mid, sid, tid);
  }

  // 5. Try to revert git commits from activity log
  // Commits are reverted newest-first (hence .reverse()) so each revert
  // applies against the tree state the commit was made on top of.
  let commitsReverted = 0;
  const activityDir = join(gsdRoot(basePath), "activity");
  if (existsSync(activityDir)) {
    const commits = findCommitsForUnit(activityDir, unitType, unitId);
    if (commits.length > 0) {
      for (const sha of commits.reverse()) {
        try {
          execFileSync("git", ["revert", "--no-commit", sha], { cwd: basePath, timeout: 10000, stdio: "ignore" });
          commitsReverted++;
        } catch {
          // Revert conflict or already reverted — skip
          // Abort the in-progress revert so the worktree is left clean,
          // then stop: later (older) commits would conflict the same way.
          try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, timeout: 5000, stdio: "ignore" }); } catch { /* no-op */ }
          break;
        }
      }
    }
  }

  // 6. Re-derive state
  // The cached state is now stale (ledger + artifacts changed on disk).
  invalidateStateCache();
  await deriveState(basePath);

  // Build result message
  const results: string[] = [`Undone: ${unitType} (${unitId})`];
  results.push(` - Removed from completed-units.json`);
  if (summaryRemoved) results.push(` - Deleted summary artifact`);
  if (planUpdated) results.push(` - Unchecked task in PLAN`);
  if (commitsReverted > 0) {
    // Reverts are staged with --no-commit so the user reviews before committing.
    results.push(` - Reverted ${commitsReverted} commit(s) (staged, not committed)`);
    results.push(`   Review with 'git diff --cached' then 'git commit' or 'git reset HEAD'`);
  }

  ctx.ui.notify(results.join("\n"), "success");
  sendDesktopNotification("GSD", `Undone: ${unitType} (${unitId})`, "info", "complete");
}
|
||||
|
||||
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
export function uncheckTaskInPlan(basePath: string, mid: string, sid: string, tid: string): boolean {
|
||||
const slicePath = resolveSlicePath(basePath, mid, sid);
|
||||
if (!slicePath) return false;
|
||||
|
||||
// Find the PLAN file
|
||||
const planCandidates = findFileWithPrefix(slicePath, sid, "PLAN");
|
||||
if (planCandidates.length === 0) return false;
|
||||
|
||||
const planFile = planCandidates[0];
|
||||
let content = readFileSync(planFile, "utf-8");
|
||||
|
||||
// Match checked task line: - [x] **T01** or - [x] T01:
|
||||
const regex = new RegExp(`^(\\s*-\\s*)\\[x\\](\\s*\\**${tid}\\**[:\\s])`, "mi");
|
||||
if (regex.test(content)) {
|
||||
content = content.replace(regex, "$1[ ]$2");
|
||||
writeFileSync(planFile, content, "utf-8");
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function findFileWithPrefix(dir: string, prefix: string, suffix: string): string[] {
|
||||
try {
|
||||
const files = readdirSync(dir);
|
||||
return files
|
||||
.filter(f => f.includes(suffix) && (f.startsWith(prefix) || f.startsWith(`${prefix}-`)))
|
||||
.map(f => join(dir, f));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export function findCommitsForUnit(activityDir: string, unitType: string, unitId: string): string[] {
|
||||
const safeUnitId = unitId.replace(/\//g, "-");
|
||||
const commits: string[] = [];
|
||||
|
||||
try {
|
||||
const files = readdirSync(activityDir)
|
||||
.filter(f => f.includes(unitType) && f.includes(safeUnitId) && f.endsWith(".jsonl"))
|
||||
.sort()
|
||||
.reverse();
|
||||
|
||||
if (files.length === 0) return [];
|
||||
|
||||
// Parse the most recent activity log for this unit
|
||||
const content = readFileSync(join(activityDir, files[0]), "utf-8");
|
||||
for (const line of content.split("\n")) {
|
||||
if (!line.trim()) continue;
|
||||
try {
|
||||
const entry = JSON.parse(line);
|
||||
// Look for tool results containing git commit output
|
||||
if (entry?.message?.content) {
|
||||
const blocks = Array.isArray(entry.message.content) ? entry.message.content : [];
|
||||
for (const block of blocks) {
|
||||
if (block.type === "tool_result" && typeof block.content === "string") {
|
||||
for (const sha of extractCommitShas(block.content)) {
|
||||
if (!commits.includes(sha)) {
|
||||
commits.push(sha);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch { /* malformed JSON line — skip */ }
|
||||
}
|
||||
} catch { /* activity dir issues — skip */ }
|
||||
|
||||
return commits;
|
||||
}
|
||||
|
||||
export function extractCommitShas(content: string): string[] {
|
||||
const commits: string[] = [];
|
||||
for (const match of content.matchAll(/\[[\w/.-]+\s+([a-f0-9]{7,40})\]/g)) {
|
||||
const sha = match[1];
|
||||
if (sha && !commits.includes(sha)) {
|
||||
commits.push(sha);
|
||||
}
|
||||
}
|
||||
return commits;
|
||||
}
|
||||
Loading…
Add table
Reference in a new issue