Merge pull request #2282 from jeremymcs/feat/v1-v2-command-parity

feat(gsd): add v1→v2 command parity — 9 missing commands
This commit is contained in:
Jeremy McSpadden 2026-04-14 18:28:21 -05:00 committed by GitHub
commit e09fafc376
15 changed files with 1581 additions and 1 deletion

View file

@ -0,0 +1,137 @@
/**
* GSD Command /gsd add-tests
*
* Generates tests for a completed slice by dispatching an LLM prompt
* with implementation context (summaries, changed files, test patterns).
*/
import type { ExtensionAPI, ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { existsSync, readFileSync, readdirSync } from "node:fs";
import { join } from "node:path";
import { deriveState } from "./state.js";
import { gsdRoot, resolveSliceFile } from "./paths.js";
import { loadPrompt } from "./prompt-loader.js";
/**
 * Find the most recent completed slice for a milestone.
 *
 * A slice is considered completed when its `<id>-SUMMARY.md` exists on disk.
 * Returns the slice ID (e.g. "S03") or null when none qualifies.
 */
function findLastCompletedSlice(basePath: string, milestoneId: string): string | null {
  // Scan disk for slices that have a SUMMARY.md (indicating completion)
  const slicesDir = join(gsdRoot(basePath), "milestones", milestoneId, "slices");
  if (!existsSync(slicesDir)) return null;
  try {
    const sliceNumber = (name: string): number => parseInt(name.slice(1), 10);
    const entries = readdirSync(slicesDir, { withFileTypes: true })
      .filter((e) => e.isDirectory() && /^S\d+$/.test(e.name))
      // Sort by numeric slice index, descending, so the latest slice is
      // checked first. (The previous localeCompare sort ordered "S2" after
      // "S10" once IDs left the zero-padded range.)
      .sort((a, b) => sliceNumber(b.name) - sliceNumber(a.name));
    for (const entry of entries) {
      const summaryPath = join(slicesDir, entry.name, `${entry.name}-SUMMARY.md`);
      if (existsSync(summaryPath)) return entry.name;
    }
  } catch {
    // non-fatal — unreadable dir is treated as "no completed slice"
  }
  return null;
}
/**
 * Load a slice's SUMMARY file, extracting its first markdown H1 as the title.
 * Falls back to the slice ID and a placeholder body when no summary exists.
 */
function readSliceSummary(basePath: string, milestoneId: string, sliceId: string): { title: string; content: string } {
  const summaryPath = resolveSliceFile(basePath, milestoneId, sliceId, "SUMMARY");
  if (!summaryPath || !existsSync(summaryPath)) {
    return { title: sliceId, content: "(no summary available)" };
  }
  const content = readFileSync(summaryPath, "utf-8");
  // First "# Heading" line becomes the title; otherwise use the slice ID.
  const heading = /^#\s+(.+)/m.exec(content);
  return { title: heading?.[1] ?? sliceId, content };
}
/**
 * Inspect a project directory and describe its testing setup as a string:
 * detected framework config files, the first conventional test directory
 * containing test files, and a 500-character sample from its first test file.
 * Returns a fixed fallback message when nothing is found.
 */
function detectTestPatterns(basePath: string): string {
  const findings: string[] = [];
  // Known config files mapped to framework names.
  const frameworkConfigs: ReadonlyArray<[string, string]> = [
    ["jest.config.ts", "Jest"],
    ["jest.config.js", "Jest"],
    ["vitest.config.ts", "Vitest"],
    ["vitest.config.js", "Vitest"],
    [".mocharc.yml", "Mocha"],
  ];
  for (const [file, name] of frameworkConfigs) {
    if (existsSync(join(basePath, file))) {
      findings.push(`Framework: ${name} (${file})`);
    }
  }
  // Probe conventional test directories; the first one holding test files
  // contributes a count plus a short sample for pattern inference.
  for (const dir of ["tests", "test", "src/__tests__", "__tests__"]) {
    const fullDir = join(basePath, dir);
    if (!existsSync(fullDir)) continue;
    try {
      const testFiles = readdirSync(fullDir).filter((f) => f.endsWith(".test.ts") || f.endsWith(".spec.ts") || f.endsWith(".test.js"));
      if (testFiles.length === 0) continue;
      findings.push(`Test directory: ${dir}/ (${testFiles.length} test files)`);
      // Read first test file for patterns
      const sample = readFileSync(join(fullDir, testFiles[0]), "utf-8").slice(0, 500);
      findings.push(`Sample pattern from ${testFiles[0]}:\n${sample}`);
      break;
    } catch {
      // non-fatal — keep probing the remaining candidate directories
    }
  }
  return findings.length > 0 ? findings.join("\n") : "No test framework detected. Use Node.js built-in test runner.";
}
/**
 * Handle `/gsd add-tests [sliceId]`.
 *
 * Resolves the target slice (explicit argument, or the latest completed
 * slice), gathers its summary and the project's test patterns, then
 * dispatches a hidden LLM prompt that triggers a turn.
 */
export async function handleAddTests(
  args: string,
  ctx: ExtensionCommandContext,
  pi: ExtensionAPI,
): Promise<void> {
  const basePath = process.cwd();
  const state = await deriveState(basePath);
  if (!state.activeMilestone) {
    ctx.ui.notify("No active milestone.", "warning");
    return;
  }
  const milestoneId = state.activeMilestone.id;
  // An explicit slice ID wins; otherwise fall back to the newest slice
  // that has a SUMMARY on disk.
  const explicit = args.trim();
  const targetId = explicit !== "" ? explicit : findLastCompletedSlice(basePath, milestoneId);
  if (!targetId) {
    ctx.ui.notify(
      "No completed slices found. Specify a slice ID: /gsd add-tests S03",
      "warning",
    );
    return;
  }
  // Gather implementation context for the prompt.
  const summary = readSliceSummary(basePath, milestoneId, targetId);
  const testPatterns = detectTestPatterns(basePath);
  ctx.ui.notify(`Generating tests for ${targetId}: "${summary.title}"...`, "info");
  try {
    const prompt = loadPrompt("add-tests", {
      sliceId: targetId,
      sliceTitle: summary.title,
      sliceSummary: summary.content,
      existingTestPatterns: testPatterns,
      workingDirectory: basePath,
    });
    // Hidden message (display: false) so only the agent sees the prompt.
    pi.sendMessage(
      { customType: "gsd-add-tests", content: prompt, display: false },
      { triggerTurn: true },
    );
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    ctx.ui.notify(`Failed to dispatch test generation: ${msg}`, "error");
  }
}

View file

@ -0,0 +1,182 @@
/**
* GSD Command /gsd backlog
*
* Structured backlog management with 999.x numbering.
* Items stored in .gsd/BACKLOG.md as markdown checklist.
* Items can be promoted to active slices via add-slice.
*/
import type { ExtensionAPI, ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
import { join, dirname } from "node:path";
import { gsdRoot } from "./paths.js";
/** One entry of .gsd/BACKLOG.md, as parsed from a markdown checklist line. */
interface BacklogItem {
  // Backlog identifier in 999.x form, e.g. "999.1".
  id: string;
  // Human-readable title text.
  title: string;
  // True when the checkbox is [x] — the item was promoted or completed.
  done: boolean;
  // Optional trailing annotation rendered in parentheses, e.g. "added 2026-04-14".
  note: string;
}
/** Absolute path of the backlog file: `<gsd root>/BACKLOG.md`. */
function backlogPath(basePath: string): string {
  return join(gsdRoot(basePath), "BACKLOG.md");
}
/**
 * Read .gsd/BACKLOG.md and extract backlog items.
 *
 * Matches checklist lines of the form `- [x] 999.N — title (note)`;
 * non-matching lines are ignored. Returns [] when the file is absent.
 */
function parseBacklog(basePath: string): BacklogItem[] {
  const filePath = backlogPath(basePath);
  if (!existsSync(filePath)) return [];
  const itemPattern = /^- \[([ x])\] (999\.\d+) — (.+?)(?:\s*\((.+)\))?$/;
  const result: BacklogItem[] = [];
  for (const line of readFileSync(filePath, "utf-8").split("\n")) {
    const m = itemPattern.exec(line);
    if (!m) continue;
    result.push({
      id: m[2],
      title: m[3].trim(),
      done: m[1] === "x",
      note: m[4] ?? "",
    });
  }
  return result;
}
/**
 * Persist backlog items to .gsd/BACKLOG.md as a markdown checklist,
 * creating the parent directory if needed.
 */
function writeBacklog(basePath: string, items: BacklogItem[]): void {
  const filePath = backlogPath(basePath);
  mkdirSync(dirname(filePath), { recursive: true });
  const lines = ["# Backlog\n"];
  for (const item of items) {
    const check = item.done ? "x" : " ";
    const note = item.note ? ` (${item.note})` : "";
    // The " — " separator is required: parseBacklog's regex only matches
    // "- [x] 999.N — title (note)". The previous id+title concatenation
    // produced lines that could never be parsed back, losing every item
    // on the next read/write cycle.
    lines.push(`- [${check}] ${item.id} — ${item.title}${note}`);
  }
  lines.push(""); // trailing newline
  writeFileSync(filePath, lines.join("\n"), "utf-8");
}
/**
 * Compute the next free backlog ID: one past the highest existing 999.x
 * suffix, or "999.1" when no well-formed IDs exist.
 */
function nextBacklogId(items: BacklogItem[]): string {
  const suffixes = items
    .map((item) => /^999\.(\d+)$/.exec(item.id))
    .filter((m): m is RegExpExecArray => m !== null)
    .map((m) => parseInt(m[1], 10));
  const highest = suffixes.length > 0 ? Math.max(...suffixes) : 0;
  return `999.${highest + 1}`;
}
/**
 * Display the backlog as a checklist with a pending/done summary line.
 * Prompts with usage help when the backlog is empty.
 */
async function listBacklog(basePath: string, ctx: ExtensionCommandContext): Promise<void> {
  const items = parseBacklog(basePath);
  if (items.length === 0) {
    ctx.ui.notify("Backlog is empty. Add items with /gsd backlog add <title>", "info");
    return;
  }
  const lines = ["Backlog:\n"];
  for (const item of items) {
    const status = item.done ? "✓" : "○";
    const note = item.note ? ` (${item.note})` : "";
    // Separate ID and title with " — " (previously concatenated, which
    // rendered entries as e.g. "999.1Fix login").
    lines.push(` ${status} ${item.id} — ${item.title}${note}`);
  }
  const pending = items.filter((i) => !i.done).length;
  lines.push(`\n${pending} pending, ${items.length - pending} promoted/done`);
  ctx.ui.notify(lines.join("\n"), "info");
}
/**
 * Append a new backlog entry with the next free 999.x ID and today's date
 * in the note. Empty titles get a usage message instead.
 */
async function addBacklogItem(basePath: string, title: string, ctx: ExtensionCommandContext): Promise<void> {
  if (!title) {
    ctx.ui.notify("Usage: /gsd backlog add <title>", "warning");
    return;
  }
  const items = parseBacklog(basePath);
  const id = nextBacklogId(items);
  const today = new Date().toISOString().slice(0, 10);
  // Drop a single leading/trailing quote character from the stored title.
  const cleanTitle = title.replace(/^['"]|['"]$/g, "");
  items.push({ id, title: cleanTitle, done: false, note: `added ${today}` });
  writeBacklog(basePath, items);
  // The notification echoes the raw title exactly as the user typed it.
  ctx.ui.notify(`Added ${id}: "${title}"`, "success");
}
/**
 * Mark a backlog item as promoted.
 *
 * Actual slice creation requires the single-writer engine (not yet
 * available), so for now the item is only flagged done with a
 * "promoted <date>" note and the user is pointed at manual follow-up.
 */
async function promoteBacklogItem(
  basePath: string,
  itemId: string,
  ctx: ExtensionCommandContext,
  pi: ExtensionAPI,
): Promise<void> {
  if (!itemId) {
    ctx.ui.notify("Usage: /gsd backlog promote <id>\nExample: /gsd backlog promote 999.1", "warning");
    return;
  }
  const items = parseBacklog(basePath);
  const target = items.find((candidate) => candidate.id === itemId);
  if (target === undefined) {
    ctx.ui.notify(`Backlog item ${itemId} not found.`, "warning");
    return;
  }
  if (target.done) {
    ctx.ui.notify(`${itemId} is already promoted/done.`, "info");
    return;
  }
  // Record the promotion in the backlog file only (see doc comment above).
  target.done = true;
  target.note = `promoted ${new Date().toISOString().slice(0, 10)}`;
  writeBacklog(basePath, items);
  ctx.ui.notify(`Promoted ${itemId}: "${target.title}" — add it to the roadmap manually or wait for engine slice commands.`, "info");
}
/** Delete a backlog entry by ID and rewrite the backlog file. */
async function removeBacklogItem(basePath: string, itemId: string, ctx: ExtensionCommandContext): Promise<void> {
  if (!itemId) {
    ctx.ui.notify("Usage: /gsd backlog remove <id>", "warning");
    return;
  }
  const items = parseBacklog(basePath);
  const position = items.findIndex((candidate) => candidate.id === itemId);
  if (position < 0) {
    ctx.ui.notify(`Backlog item ${itemId} not found.`, "warning");
    return;
  }
  const [removed] = items.splice(position, 1);
  writeBacklog(basePath, items);
  ctx.ui.notify(`Removed ${removed.id}: "${removed.title}"`, "success");
}
/**
 * Handle `/gsd backlog [add|promote|remove] ...`.
 *
 * The first whitespace-separated token selects the subcommand and the rest
 * is its argument. Empty input lists the backlog; any unrecognized token is
 * treated as an implicit "add" of the full input.
 */
export async function handleBacklog(
  args: string,
  ctx: ExtensionCommandContext,
  pi: ExtensionAPI,
): Promise<void> {
  const basePath = process.cwd();
  const [sub = "", ...restParts] = args.trim().split(/\s+/);
  const rest = restParts.join(" ");
  if (sub === "") return listBacklog(basePath, ctx);
  if (sub === "add") return addBacklogItem(basePath, rest, ctx);
  if (sub === "promote") return promoteBacklogItem(basePath, rest.trim(), ctx, pi);
  if (sub === "remove") return removeBacklogItem(basePath, rest.trim(), ctx);
  // Unrecognized subcommand → implicit add with the original args intact.
  return addBacklogItem(basePath, args, ctx);
}

View file

@ -0,0 +1,109 @@
/**
* GSD Command /gsd do
*
* Routes freeform natural language to the correct /gsd subcommand
* using keyword matching. Falls back to /gsd quick for task-like input.
*/
import type { ExtensionAPI, ExtensionCommandContext } from "@gsd/pi-coding-agent";
/** One routing rule: any keyword hit maps user input to a /gsd subcommand. */
interface Route {
  // Lowercase phrases matched as substrings of the (lowercased) input.
  keywords: string[];
  // The /gsd subcommand dispatched on a match.
  command: string;
}
// Keyword → command routing table. matchRoute picks the longest matching
// keyword across all routes, so multi-word phrases beat shorter overlaps
// (e.g. "session report" routes to session-report, not history's "past").
const ROUTES: Route[] = [
  { keywords: ["progress", "status", "dashboard", "how far", "where are we"], command: "status" },
  { keywords: ["auto", "autonomous", "run all", "keep going", "start auto"], command: "auto" },
  { keywords: ["stop", "halt", "abort"], command: "stop" },
  { keywords: ["pause", "break", "take a break"], command: "pause" },
  { keywords: ["history", "past", "what happened", "previous"], command: "history" },
  { keywords: ["doctor", "health", "diagnose", "check health"], command: "doctor" },
  { keywords: ["clean up", "cleanup", "remove old", "prune", "tidy"], command: "cleanup" },
  { keywords: ["export", "report", "share results"], command: "export" },
  { keywords: ["ship", "pull request", "create pr", "open pr", "merge"], command: "ship" },
  { keywords: ["discuss", "talk about", "architecture", "design"], command: "discuss" },
  { keywords: ["undo", "revert", "rollback", "take back"], command: "undo" },
  { keywords: ["skip", "skip task", "skip this"], command: "skip" },
  { keywords: ["queue", "reorder", "milestone order", "order milestones"], command: "queue" },
  { keywords: ["visualize", "viz", "graph", "chart", "show graph"], command: "visualize" },
  { keywords: ["capture", "note", "idea", "thought", "remember"], command: "capture" },
  { keywords: ["inspect", "database", "sqlite", "db state"], command: "inspect" },
  { keywords: ["knowledge", "rule", "pattern", "lesson"], command: "knowledge" },
  { keywords: ["session report", "session summary", "cost summary", "how much"], command: "session-report" },
  { keywords: ["backlog", "parking lot", "later", "someday"], command: "backlog" },
  { keywords: ["pr branch", "clean branch", "filter commits"], command: "pr-branch" },
  { keywords: ["add tests", "write tests", "generate tests", "test coverage"], command: "add-tests" },
  { keywords: ["next", "step", "next step", "what's next"], command: "next" },
  { keywords: ["migrate", "migration", "convert", "upgrade"], command: "migrate" },
  { keywords: ["steer", "change direction", "pivot", "redirect"], command: "steer" },
  { keywords: ["park", "shelve", "set aside"], command: "park" },
  { keywords: ["widget", "toggle widget"], command: "widget" },
  { keywords: ["logs", "debug logs", "log files"], command: "logs" },
];
/** Result of matchRoute: routed command plus leftover text and rank score. */
interface MatchResult {
  command: string;
  // Original input with the matched keyword removed — forwarded as args.
  remainingArgs: string;
  // Length of the matched keyword; longer keyword = higher confidence.
  score: number;
}
/**
 * Find the best route for freeform input: case-insensitive substring search
 * over every route keyword, longest matching keyword wins (ties keep the
 * earliest route/keyword). Returns null when nothing matches.
 */
function matchRoute(input: string): MatchResult | null {
  const haystack = input.toLowerCase();
  let best: MatchResult | null = null;
  for (const route of ROUTES) {
    for (const keyword of route.keywords) {
      const at = haystack.indexOf(keyword);
      if (at === -1) continue;
      if (best !== null && keyword.length <= best.score) continue;
      // Remove the keyword span from the case-preserving input so the
      // leftover text can serve as command arguments.
      const leftover = (input.slice(0, at) + input.slice(at + keyword.length)).trim();
      best = { command: route.command, remainingArgs: leftover, score: keyword.length };
    }
  }
  return best;
}
/**
 * Handle `/gsd do <freeform text>`.
 *
 * Routes the text to a /gsd subcommand via keyword matching; unmatched
 * input is forwarded to `/gsd quick` as a task.
 */
export async function handleDo(
  args: string,
  ctx: ExtensionCommandContext,
  pi: ExtensionAPI,
): Promise<void> {
  if (!args.trim()) {
    ctx.ui.notify(
      "Usage: /gsd do <what you want to do>\n\n" +
        "Examples:\n" +
        " /gsd do show me progress\n" +
        " /gsd do run autonomously\n" +
        " /gsd do clean up old branches\n" +
        " /gsd do fix the login bug",
      "warning",
    );
    return;
  }
  const match = matchRoute(args);
  if (!match) {
    // No keyword hit — treat the whole input as a one-off quick task.
    ctx.ui.notify(`→ /gsd quick ${args}`, "info");
    const { handleQuick } = await import("./quick.js");
    await handleQuick(args, ctx, pi);
    return;
  }
  // Rebuild "command [leftover args]" and re-dispatch it through the main
  // /gsd dispatcher (imported lazily to avoid a module cycle at load time).
  const pieces = [match.command];
  if (match.remainingArgs) pieces.push(match.remainingArgs);
  const fullCommand = pieces.join(" ");
  ctx.ui.notify(`→ /gsd ${fullCommand}`, "info");
  const { handleGSDCommand } = await import("./commands/dispatcher.js");
  await handleGSDCommand(fullCommand, ctx, pi);
}

View file

@ -0,0 +1,234 @@
/**
* GSD Command /gsd pr-branch
*
* Creates a clean PR branch by cherry-picking commits while stripping
* any changes to .gsd/, .planning/, and PLAN.md paths. Useful for
* upstream PRs where planning artifacts should not be included.
*/
import type { ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { execFileSync } from "node:child_process";
import {
nativeGetCurrentBranch,
nativeDetectMainBranch,
nativeBranchExists,
} from "./native-git-bridge.js";
const EXCLUDED_PATHS = [".gsd", ".planning", "PLAN.md"] as const;
/** Run git with argv-style args in basePath; returns trimmed stdout. Throws on non-zero exit. */
function git(basePath: string, args: readonly string[]): string {
  const stdout = execFileSync("git", args, { cwd: basePath, encoding: "utf-8" });
  return stdout.trim();
}
/**
 * Run git but swallow any failure. Used for cleanup/best-effort steps
 * where a non-zero exit is acceptable.
 */
function gitAllowFail(basePath: string, args: readonly string[]): void {
  try {
    execFileSync("git", args, { cwd: basePath, encoding: "utf-8", stdio: "pipe" });
  } catch {
    // Intentionally ignored — callers opt into non-fatal behavior.
  }
}
/**
 * True when the git index has staged changes.
 * `git diff --cached --quiet` exits 0 on a clean index and non-zero when
 * something is staged, so the thrown-exception path maps to "has changes".
 */
function hasStagedChanges(basePath: string): boolean {
  try {
    execFileSync("git", ["diff", "--cached", "--quiet"], {
      cwd: basePath,
      stdio: "pipe",
    });
  } catch {
    return true;
  }
  return false;
}
/**
 * Validate a branch name by delegating to git itself:
 * `git check-ref-format --branch` exits non-zero for illegal names.
 */
function isValidBranchName(name: string): boolean {
  try {
    execFileSync("git", ["check-ref-format", "--branch", name], { stdio: "pipe" });
  } catch {
    return false;
  }
  return true;
}
/**
 * List the SHAs in base..head that touch at least one non-planning file
 * (anything outside .gsd/, .planning/, and PLAN.md), returned oldest-first
 * so they can be cherry-picked in order. Any git failure yields [].
 */
function getCodeOnlyCommits(basePath: string, base: string, head: string): string[] {
  const touchesCode = (file: string): boolean =>
    !file.startsWith(".gsd/") && !file.startsWith(".planning/") && file !== "PLAN.md";
  try {
    // git log emits newest-first; reverse at the end for chronological order.
    const shas = git(basePath, ["log", "--format=%H", `${base}..${head}`])
      .split("\n")
      .filter(Boolean);
    const kept = shas.filter((sha) => {
      const changed = git(basePath, ["diff-tree", "--no-commit-id", "--name-only", "-r", sha])
        .split("\n")
        .filter(Boolean);
      return changed.some(touchesCode);
    });
    return kept.reverse();
  } catch {
    return [];
  }
}
/**
* Cherry-pick a commit while stripping excluded paths from the resulting
* commit. Returns true if a commit was produced, false if nothing remained
* after filtering.
*/
function cherryPickFiltered(basePath: string, sha: string): boolean {
  // Stage the commit's changes without committing; --allow-empty tolerates
  // commits that become empty relative to the new base.
  git(basePath, ["cherry-pick", "--no-commit", "--allow-empty", sha]);
  // Unstage any excluded paths introduced by the cherry-pick.
  gitAllowFail(basePath, ["reset", "HEAD", "--", ...EXCLUDED_PATHS]);
  // Restore worktree state for excluded paths from HEAD (if tracked),
  // then remove any newly introduced untracked files under those paths.
  gitAllowFail(basePath, ["checkout", "HEAD", "--", ...EXCLUDED_PATHS]);
  gitAllowFail(basePath, ["clean", "-fdq", "--", ...EXCLUDED_PATHS]);
  if (!hasStagedChanges(basePath)) {
    // Nothing remained after filtering — discard worktree residue and skip.
    git(basePath, ["reset", "--hard", "HEAD"]);
    return false;
  }
  // Commit the filtered result, reusing the original commit's message,
  // author, and date (-C).
  git(basePath, ["commit", "-C", sha]);
  return true;
}
/**
 * Post-condition check: after filtering, the base..HEAD diff must not
 * mention any planning artifact. Throws with a capped file list (first 5,
 * plus a count of the rest) if anything leaked through.
 */
function assertNoExcludedPaths(basePath: string, base: string): void {
  const changed = git(basePath, ["diff", "--name-only", `${base}..HEAD`])
    .split("\n")
    .filter(Boolean);
  const leaked = changed.filter(
    (f) => f.startsWith(".gsd/") || f.startsWith(".planning/") || f === "PLAN.md",
  );
  if (leaked.length === 0) return;
  const preview = leaked.slice(0, 5).join(", ");
  const overflow = leaked.length > 5 ? ` (+${leaked.length - 5} more)` : "";
  throw new Error(`PR branch still contains excluded paths: ${preview}${overflow}`);
}
/**
 * Handle `/gsd pr-branch [--dry-run] [--name <branch>]`.
 *
 * Builds a clean PR branch off the base ref by cherry-picking only commits
 * that contain code changes, stripping .gsd/, .planning/, and PLAN.md from
 * each. On any failure it restores the original branch.
 */
export async function handlePrBranch(
  args: string,
  ctx: ExtensionCommandContext,
): Promise<void> {
  const basePath = process.cwd();
  const dryRun = args.includes("--dry-run");
  const nameMatch = args.match(/--name\s+(\S+)/);
  const currentBranch = nativeGetCurrentBranch(basePath);
  const mainBranch = nativeDetectMainBranch(basePath);
  // Determine base ref (prefer upstream/main if available)
  let baseRef: string;
  try {
    // rev-parse --verify throws when the ref does not exist.
    git(basePath, ["rev-parse", "--verify", "upstream/main"]);
    baseRef = "upstream/main";
  } catch {
    baseRef = mainBranch;
  }
  // Find commits with code changes
  const commits = getCodeOnlyCommits(basePath, baseRef, "HEAD");
  if (commits.length === 0) {
    ctx.ui.notify("No code-only commits found (all commits only touch .gsd/ files).", "info");
    return;
  }
  if (dryRun) {
    // Preview mode: list the commits that would be picked, no side effects.
    const lines = [`Would create PR branch with ${commits.length} commits (filtering .gsd/ paths):\n`];
    for (const sha of commits) {
      const msg = git(basePath, ["log", "--format=%s", "-1", sha]);
      lines.push(` ${sha.slice(0, 8)} ${msg}`);
    }
    ctx.ui.notify(lines.join("\n"), "info");
    return;
  }
  // Validate the requested (or derived pr/<current>) branch name before
  // touching the repository.
  const requestedName = nameMatch?.[1];
  if (requestedName && !isValidBranchName(requestedName)) {
    ctx.ui.notify(
      `Invalid branch name: ${requestedName}. Must satisfy git check-ref-format.`,
      "error",
    );
    return;
  }
  const defaultName = `pr/${currentBranch}`;
  const prBranch = requestedName ?? defaultName;
  if (!isValidBranchName(prBranch)) {
    ctx.ui.notify(
      `Derived branch name is invalid: ${prBranch}. Use --name to override.`,
      "error",
    );
    return;
  }
  if (nativeBranchExists(basePath, prBranch)) {
    ctx.ui.notify(
      `Branch ${prBranch} already exists. Use --name to specify a different name, or delete it first.`,
      "warning",
    );
    return;
  }
  try {
    // Create clean branch from base
    git(basePath, ["checkout", "-b", prBranch, baseRef]);
    // Cherry-pick with path filter
    let picked = 0;
    let skipped = 0;
    for (const sha of commits) {
      try {
        if (cherryPickFiltered(basePath, sha)) {
          picked++;
        } else {
          skipped++;
        }
      } catch (pickErr) {
        // Conflict mid-pick: abort the cherry-pick, discard residue, report
        // progress, and return the user to their original branch.
        gitAllowFail(basePath, ["cherry-pick", "--abort"]);
        gitAllowFail(basePath, ["reset", "--hard", "HEAD"]);
        const detail = pickErr instanceof Error ? pickErr.message : String(pickErr);
        ctx.ui.notify(
          `Cherry-pick conflict at ${sha.slice(0, 8)}. Picked ${picked}/${commits.length} commits. Resolve manually.\n${detail}`,
          "warning",
        );
        git(basePath, ["checkout", currentBranch]);
        return;
      }
    }
    // Post-condition: no excluded paths should appear in the PR branch diff.
    assertNoExcludedPaths(basePath, baseRef);
    const skippedMsg = skipped > 0 ? ` (${skipped} skipped — contained only planning artifacts)` : "";
    ctx.ui.notify(
      `Created ${prBranch} with ${picked} commits${skippedMsg} (no .gsd/ artifacts).\nSwitch back: git checkout ${currentBranch}`,
      "success",
    );
  } catch (err) {
    // Restore original branch on failure
    gitAllowFail(basePath, ["cherry-pick", "--abort"]);
    gitAllowFail(basePath, ["reset", "--hard", "HEAD"]);
    gitAllowFail(basePath, ["checkout", currentBranch]);
    const msg = err instanceof Error ? err.message : String(err);
    ctx.ui.notify(`Failed to create PR branch: ${msg}`, "error");
  }
}

View file

@ -0,0 +1,101 @@
/**
* GSD Command /gsd session-report
*
* Summarizes the current session: tasks completed, cost, tokens,
* duration, model usage breakdown.
*/
import type { ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { getLedger, getProjectTotals, aggregateByModel, formatCost, formatTokenCount, loadLedgerFromDisk } from "./metrics.js";
import type { UnitMetrics } from "./metrics.js";
import { gsdRoot } from "./paths.js";
import { formatDuration } from "../shared/format-utils.js";
/**
 * Render the session report as a box-drawn text block: totals (duration,
 * unit count, cost, tokens), per-unit completion list, and per-model usage.
 *
 * NOTE(review): only the header/footer and the literal "│ … │" spacer lines
 * form a closed box — the padEnd(40)/padEnd(53) lines have no trailing "│",
 * so the right border looks ragged in most terminals. Confirm whether that
 * is intentional before changing the layout.
 */
function formatSessionReport(units: UnitMetrics[]): string {
  const totals = getProjectTotals(units);
  const byModel = aggregateByModel(units);
  const lines: string[] = [];
  lines.push("╭─ Session Report ──────────────────────────────────────╮");
  // Duration row only shown when any duration was recorded.
  if (totals.duration > 0) {
    lines.push(`│ Duration: ${formatDuration(totals.duration).padEnd(40)}`);
  }
  lines.push(`│ Units: ${String(units.length).padEnd(40)}`);
  lines.push(`│ Cost: ${formatCost(totals.cost).padEnd(40)}`);
  lines.push(`│ Tokens: ${`${formatTokenCount(totals.tokens.input)} in / ${formatTokenCount(totals.tokens.output)} out`.padEnd(40)}`);
  lines.push("│ │");
  // Work completed
  if (units.length > 0) {
    lines.push("│ Work Completed: │");
    for (const unit of units) {
      // finishedAt > 0 marks a unit as finished; unfinished units get "•".
      const finished = unit.finishedAt > 0;
      const status = finished ? "✓" : "•";
      const label = ` ${status} ${unit.id ?? "unknown"}`;
      lines.push(`${label.padEnd(53)}`);
    }
    lines.push("│ │");
  }
  // Model usage
  if (byModel.length > 0) {
    lines.push("│ Model Usage: │");
    for (const m of byModel) {
      const label = ` ${m.model}: ${m.units} units (${formatCost(m.cost)})`;
      lines.push(`${label.padEnd(53)}`);
    }
  }
  lines.push("╰───────────────────────────────────────────────────────╯");
  return lines.join("\n");
}
export async function handleSessionReport(
args: string,
ctx: ExtensionCommandContext,
): Promise<void> {
const basePath = process.cwd();
// Get units from in-memory ledger or disk
const ledger = getLedger();
let units: UnitMetrics[];
if (ledger && ledger.units.length > 0) {
units = ledger.units;
} else {
const diskLedger = loadLedgerFromDisk(basePath);
if (!diskLedger || diskLedger.units.length === 0) {
ctx.ui.notify("No session data — no units have been executed yet.", "info");
return;
}
units = diskLedger.units;
}
// JSON output
if (args.includes("--json")) {
const totals = getProjectTotals(units);
const byModel = aggregateByModel(units);
ctx.ui.notify(JSON.stringify({ units: units.length, totals, byModel }, null, 2), "info");
return;
}
// Save to file
if (args.includes("--save")) {
const report = formatSessionReport(units);
const reportsDir = join(gsdRoot(basePath), "reports");
mkdirSync(reportsDir, { recursive: true });
const timestamp = new Date().toISOString().replace(/[:.]/g, "-").slice(0, 19);
const outPath = join(reportsDir, `session-${timestamp}.md`);
writeFileSync(outPath, `\`\`\`\n${report}\n\`\`\`\n`, "utf-8");
ctx.ui.notify(`Report saved: ${outPath}`, "success");
return;
}
// Display
ctx.ui.notify(formatSessionReport(units), "info");
}

View file

@ -0,0 +1,219 @@
/**
* GSD Command /gsd ship
*
* Creates a PR from milestone artifacts: generates title + body from
* roadmap, slice summaries, and metrics, then opens via `gh pr create`.
*/
import type { ExtensionAPI, ExtensionCommandContext } from "@gsd/pi-coding-agent";
import { execFileSync } from "node:child_process";
import { existsSync, readFileSync, readdirSync } from "node:fs";
import { deriveState } from "./state.js";
import { resolveMilestoneFile, resolveSlicePath, resolveSliceFile } from "./paths.js";
import { getLedger, getProjectTotals, aggregateByModel, formatCost, formatTokenCount, loadLedgerFromDisk } from "./metrics.js";
import { nativeGetCurrentBranch, nativeDetectMainBranch } from "./native-git-bridge.js";
import { formatDuration } from "../shared/format-utils.js";
/** Run git with argv-style arguments in basePath and return trimmed stdout. */
function git(basePath: string, args: readonly string[]): string {
  const out = execFileSync("git", args, { cwd: basePath, encoding: "utf-8" });
  return out.trim();
}
/**
 * Validate a branch name via git itself: `check-ref-format --branch`
 * exits non-zero for names git would reject.
 */
function isValidRefName(name: string): boolean {
  try {
    execFileSync("git", ["check-ref-format", "--branch", name], { stdio: "pipe" });
  } catch {
    return false;
  }
  return true;
}
/** Generated pull-request content: conventional-commit style title plus markdown body. */
interface PRContent {
  title: string;
  body: string;
}
/**
 * List the slice IDs (S\d+) for a milestone, in ascending numeric order.
 * Locates the slices directory via a resolveSlicePath probe, falling back
 * to the roadmap file's sibling `slices/` directory. Returns [] when the
 * directory cannot be found or read.
 */
function listSliceIds(basePath: string, milestoneId: string): string[] {
  // Slices live at <milestoneDir>/slices/<sliceId>/ with canonical S\d+ IDs.
  // Use resolveSlicePath with a probe to find the real slices directory root.
  const probe = resolveSlicePath(basePath, milestoneId, "S01");
  let slicesDir: string | null = null;
  if (probe) {
    // probe looks like <milestoneDir>/slices/S01 — parent is slices dir.
    slicesDir = probe.replace(/[\\/][^\\/]+$/, "");
  } else {
    // Fall back to scanning the milestones roadmap file's sibling slices dir.
    const roadmap = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
    if (roadmap) {
      slicesDir = roadmap.replace(/[\\/][^\\/]+$/, "") + "/slices";
    }
  }
  if (!slicesDir || !existsSync(slicesDir)) return [];
  try {
    return readdirSync(slicesDir, { withFileTypes: true })
      .filter((e) => e.isDirectory() && /^S\d+$/.test(e.name))
      .map((e) => e.name)
      // Numeric sort so S10 follows S9 — plain sort() would place "S10"
      // before "S2" once IDs exceed the zero-padded range.
      .sort((a, b) => parseInt(a.slice(1), 10) - parseInt(b.slice(1), 10));
  } catch {
    return [];
  }
}
/**
 * Collect "### <sliceId>\n<summary>" markdown sections for every slice of
 * the milestone that has a non-empty SUMMARY file. Unreadable or missing
 * summaries are skipped silently.
 */
function collectSliceSummaries(basePath: string, milestoneId: string): string[] {
  const sections: string[] = [];
  for (const sliceId of listSliceIds(basePath, milestoneId)) {
    const path = resolveSliceFile(basePath, milestoneId, sliceId, "SUMMARY");
    if (!path || !existsSync(path)) continue;
    try {
      const text = readFileSync(path, "utf-8").trim();
      if (text) sections.push(`### ${sliceId}\n${text}`);
    } catch {
      // non-fatal — skip this slice rather than fail the whole PR build
    }
  }
  return sections;
}
/**
 * Assemble the PR title and markdown body for a milestone from its roadmap,
 * slice summaries, and execution metrics. Sections that have no data
 * (summaries, roadmap checkboxes, metrics) are omitted.
 */
function generatePRContent(basePath: string, milestoneId: string, milestoneTitle: string): PRContent {
  const title = `feat: ${milestoneTitle || milestoneId}`;
  const sections: string[] = [];
  // TL;DR
  sections.push("## TL;DR\n");
  // Separate ID and title with " — " (previously interpolated back-to-back,
  // producing e.g. "Ship milestone M01User Auth").
  sections.push(`**What:** Ship milestone ${milestoneId} — ${milestoneTitle || "(untitled)"}`);
  sections.push(`**Why:** Milestone work complete, ready for review.`);
  sections.push(`**How:** See slice summaries below.\n`);
  // What — slice summaries
  const summaries = collectSliceSummaries(basePath, milestoneId);
  if (summaries.length > 0) {
    sections.push("## What\n");
    sections.push(summaries.join("\n\n"));
    sections.push("");
  }
  // Roadmap status — reproduce the roadmap's checkbox lines verbatim.
  const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
  if (roadmapPath && existsSync(roadmapPath)) {
    try {
      const roadmap = readFileSync(roadmapPath, "utf-8");
      const checkboxLines = roadmap.split("\n").filter((l) => /^\s*-\s*\[[ x]\]/.test(l));
      if (checkboxLines.length > 0) {
        sections.push("## Roadmap\n");
        sections.push(checkboxLines.join("\n"));
        sections.push("");
      }
    } catch {
      // non-fatal — omit the roadmap section
    }
  }
  // Metrics — live ledger preferred, then the on-disk copy.
  const ledger = getLedger();
  const units = ledger?.units ?? loadLedgerFromDisk(basePath)?.units ?? [];
  if (units.length > 0) {
    const totals = getProjectTotals(units);
    const byModel = aggregateByModel(units);
    sections.push("## Metrics\n");
    sections.push(`- **Units executed:** ${units.length}`);
    sections.push(`- **Total cost:** ${formatCost(totals.cost)}`);
    sections.push(`- **Tokens:** ${formatTokenCount(totals.tokens.input)} input / ${formatTokenCount(totals.tokens.output)} output`);
    if (totals.duration > 0) {
      sections.push(`- **Duration:** ${formatDuration(totals.duration)}`);
    }
    if (byModel.length > 0) {
      sections.push(`- **Models:** ${byModel.map((m) => `${m.model} (${m.units} units)`).join(", ")}`);
    }
    sections.push("");
  }
  // Change type checklist
  sections.push("## Change type\n");
  sections.push("- [x] `feat` — New feature or capability");
  sections.push("- [ ] `fix` — Bug fix");
  sections.push("- [ ] `refactor` — Code restructuring");
  sections.push("- [ ] `test` — Adding or updating tests");
  sections.push("- [ ] `docs` — Documentation only");
  sections.push("- [ ] `chore` — Build, CI, or tooling changes\n");
  // AI disclosure
  sections.push("---\n");
  sections.push("*This PR was prepared with AI assistance (GSD auto-mode).*");
  return { title, body: sections.join("\n") };
}
/**
 * Handle `/gsd ship [--dry-run] [--draft] [--force] [--base <branch>]`.
 *
 * Validates milestone state and git preconditions, generates PR content
 * from milestone artifacts, then pushes the current branch and opens a PR
 * via `gh pr create` (argv-safe, no shell interpolation).
 */
export async function handleShip(
  args: string,
  ctx: ExtensionCommandContext,
  _pi: ExtensionAPI,
): Promise<void> {
  const basePath = process.cwd();
  // Flag parsing (all optional).
  const dryRun = args.includes("--dry-run");
  const draft = args.includes("--draft");
  const force = args.includes("--force");
  const base = args.match(/--base\s+(\S+)/)?.[1] ?? nativeDetectMainBranch(basePath);
  if (!isValidRefName(base)) {
    ctx.ui.notify(`Invalid base branch name: ${base}`, "error");
    return;
  }
  // A milestone must be active to have anything to ship.
  const state = await deriveState(basePath);
  if (!state.activeMilestone) {
    ctx.ui.notify("No active milestone to ship. Complete milestone work first.", "warning");
    return;
  }
  const milestoneId = state.activeMilestone.id;
  const milestoneTitle = state.activeMilestone.title ?? "";
  // Phase acts as a completeness proxy (no phase field on ActiveRef);
  // --force bypasses the guard.
  if (state.phase !== "complete" && !force) {
    ctx.ui.notify(
      `Milestone ${milestoneId} may not be complete (phase: ${state.phase}). Use --force to ship anyway.`,
      "warning",
    );
    return;
  }
  const { title, body } = generatePRContent(basePath, milestoneId, milestoneTitle);
  if (dryRun) {
    // Preview only — no git/GitHub side effects.
    ctx.ui.notify(`--- PR Preview ---\n\nTitle: ${title}\n\n${body}`, "info");
    return;
  }
  // Git sanity checks: legal branch name, and not sitting on the base branch.
  const currentBranch = nativeGetCurrentBranch(basePath);
  if (!isValidRefName(currentBranch)) {
    ctx.ui.notify(`Current branch name is invalid for git: ${currentBranch}`, "error");
    return;
  }
  if (currentBranch === base) {
    ctx.ui.notify(`You're on ${base} — create a feature branch first.`, "warning");
    return;
  }
  // Push, then open the PR. Title/body are passed as argv entries, so no
  // shell quoting issues.
  try {
    git(basePath, ["push", "-u", "origin", currentBranch]);
    const ghArgs = ["pr", "create", "--base", base, "--title", title, "--body", body];
    if (draft) ghArgs.push("--draft");
    const prUrl = execFileSync("gh", ghArgs, { cwd: basePath, encoding: "utf-8" }).trim();
    ctx.ui.notify(`PR created: ${prUrl}`, "success");
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    ctx.ui.notify(`Failed to create PR: ${msg}`, "error");
  }
}

View file

@ -15,7 +15,7 @@ export interface GsdCommandDefinition {
type CompletionMap = Record<string, readonly GsdCommandDefinition[]>;
export const GSD_COMMAND_DESCRIPTION =
"GSD — Get Shit Done: /gsd help|start|templates|next|auto|stop|pause|status|widget|visualize|queue|quick|discuss|capture|triage|dispatch|history|undo|undo-task|reset-slice|rate|skip|export|cleanup|model|mode|prefs|config|keys|hooks|run-hook|skill-health|doctor|logs|forensics|changelog|migrate|remote|steer|knowledge|new-milestone|parallel|cmux|park|unpark|init|setup|inspect|extensions|update|fast|mcp|rethink|codebase|notifications";
"GSD — Get Shit Done: /gsd help|start|templates|next|auto|stop|pause|status|widget|visualize|queue|quick|discuss|capture|triage|dispatch|history|undo|undo-task|reset-slice|rate|skip|export|cleanup|model|mode|prefs|config|keys|hooks|run-hook|skill-health|doctor|logs|forensics|changelog|migrate|remote|steer|knowledge|new-milestone|parallel|cmux|park|unpark|init|setup|inspect|extensions|update|fast|mcp|rethink|codebase|notifications|ship|do|session-report|backlog|pr-branch|add-tests";
export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [
{ cmd: "help", desc: "Categorized command reference with descriptions" },
@ -74,6 +74,12 @@ export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [
{ cmd: "rethink", desc: "Conversational project reorganization — reorder, park, discard, add milestones" },
{ cmd: "workflow", desc: "Custom workflow lifecycle (new, run, list, validate, pause, resume)" },
{ cmd: "codebase", desc: "Generate, refresh, and inspect the codebase map cache (.gsd/CODEBASE.md)" },
{ cmd: "ship", desc: "Create PR from milestone artifacts and open for review" },
{ cmd: "do", desc: "Route freeform text to the right GSD command" },
{ cmd: "session-report", desc: "Session cost, tokens, and work summary" },
{ cmd: "backlog", desc: "Manage backlog items (add, promote, remove, list)" },
{ cmd: "pr-branch", desc: "Create clean PR branch filtering .gsd/ commits" },
{ cmd: "add-tests", desc: "Generate tests for completed slices" },
];
const NESTED_COMPLETIONS: CompletionMap = {
@ -244,6 +250,25 @@ const NESTED_COMPLETIONS: CompletionMap = {
{ cmd: "stats", desc: "Show file count, description coverage, and generation time" },
{ cmd: "help", desc: "Show usage and available subcommands" },
],
ship: [
{ cmd: "--dry-run", desc: "Preview PR without creating" },
{ cmd: "--draft", desc: "Open as draft PR" },
{ cmd: "--base", desc: "Override target branch (default: main)" },
{ cmd: "--force", desc: "Ship even with pending tasks" },
],
"session-report": [
{ cmd: "--json", desc: "Machine-readable JSON output" },
{ cmd: "--save", desc: "Save report to .gsd/reports/" },
],
backlog: [
{ cmd: "add", desc: "Add item to backlog" },
{ cmd: "promote", desc: "Promote backlog item to active slice" },
{ cmd: "remove", desc: "Remove backlog item" },
],
"pr-branch": [
{ cmd: "--dry-run", desc: "Preview what would be filtered" },
{ cmd: "--name", desc: "Custom branch name" },
],
};
function filterOptions(

View file

@ -11,6 +11,9 @@ import { handleExport } from "../../export.js";
import { handleHistory } from "../../history.js";
import { handleUndo } from "../../undo.js";
import { handleRemote } from "../../../remote-questions/mod.js";
import { handleShip } from "../../commands-ship.js";
import { handleSessionReport } from "../../commands-session-report.js";
import { handlePrBranch } from "../../commands-pr-branch.js";
import { projectRoot } from "../context.js";
export async function handleOpsCommand(trimmed: string, ctx: ExtensionCommandContext, pi: ExtensionAPI): Promise<boolean> {
@ -216,5 +219,22 @@ Examples:
await handleCodebase(trimmed.replace(/^codebase\s*/, "").trim(), ctx, pi);
return true;
}
// Milestone shipping — forwards remaining args to the ship handler
// (builds PR artifacts and invokes gh; see commands-ship.js).
if (trimmed === "ship" || trimmed.startsWith("ship ")) {
await handleShip(trimmed.replace(/^ship\s*/, "").trim(), ctx, pi);
return true;
}
// NOTE(review): handleSessionReport and handlePrBranch receive only ctx,
// while handleShip and handleAddTests also receive pi — confirm this
// asymmetry matches the handlers' signatures and is intentional.
if (trimmed === "session-report" || trimmed.startsWith("session-report ")) {
await handleSessionReport(trimmed.replace(/^session-report\s*/, "").trim(), ctx);
return true;
}
// Clean PR branch creation — strips the "pr-branch" prefix and forwards flags.
if (trimmed === "pr-branch" || trimmed.startsWith("pr-branch ")) {
await handlePrBranch(trimmed.replace(/^pr-branch\s*/, "").trim(), ctx);
return true;
}
// add-tests is loaded lazily (dynamic import), unlike the three handlers
// above which are imported statically at module top — presumably to defer
// its prompt-loading cost; TODO confirm this inconsistency is deliberate.
if (trimmed === "add-tests" || trimmed.startsWith("add-tests ")) {
const { handleAddTests } = await import("../../commands-add-tests.js");
await handleAddTests(trimmed.replace(/^add-tests\s*/, "").trim(), ctx, pi);
return true;
}
return false;
}

View file

@ -221,6 +221,18 @@ async function handleCustomWorkflow(
}
export async function handleWorkflowCommand(trimmed: string, ctx: ExtensionCommandContext, pi: ExtensionAPI): Promise<boolean> {
// ── /gsd do — natural language routing (must be early to route to other commands) ──
// Lazy-load the natural-language router only when /gsd do is invoked;
// the "do" prefix is stripped and the remaining free text is forwarded.
if (trimmed === "do" || trimmed.startsWith("do ")) {
const { handleDo } = await import("../../commands-do.js");
await handleDo(trimmed.replace(/^do\s*/, "").trim(), ctx, pi);
return true;
}
// ── Backlog management ──
// Lazily imported like /gsd do; strips the "backlog" prefix and forwards
// the remaining arguments (e.g. "add <title>") to the backlog handler.
if (trimmed === "backlog" || trimmed.startsWith("backlog ")) {
const { handleBacklog } = await import("../../commands-backlog.js");
await handleBacklog(trimmed.replace(/^backlog\s*/, "").trim(), ctx, pi);
return true;
}
// ── Custom workflow commands (`/gsd workflow ...`) ──
if (trimmed === "workflow" || trimmed.startsWith("workflow ")) {
const sub = trimmed.slice("workflow".length).trim();

View file

@ -0,0 +1,35 @@
You are generating tests for recently completed GSD work.
## Slice: {{sliceId}} — {{sliceTitle}}
### Summary
{{sliceSummary}}
### Existing Test Patterns
{{existingTestPatterns}}
## Working Directory
`{{workingDirectory}}`
## Instructions
1. Read the slice summary above to understand what was built
2. Identify the source files that were created or modified for this slice
3. Read the implementation code to understand behavior, edge cases, and error paths
4. Write comprehensive tests following the project's existing test patterns and framework
5. Run the tests to verify they pass
6. Fix any failures
### Rules
- Follow the project's existing test patterns (framework, assertions, file structure)
- Test behavior, not implementation details
- Cover: happy path, edge cases, error conditions, boundary values
- Do NOT modify implementation files — only create or update test files
- Name test files consistently with the project's conventions
- Keep tests focused and readable
{{skillActivation}}

View file

@ -0,0 +1,158 @@
import test from "node:test";
import assert from "node:assert/strict";
import { mkdirSync, writeFileSync, readFileSync, existsSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { randomUUID } from "node:crypto";
// ─── Helpers ──────────────────────────────────────────────────────────────
// Create an isolated temp project root containing an empty .gsd/ directory.
// A random UUID in the path keeps parallel test runs from colliding.
function makeTmpBase(): string {
  const base = join(tmpdir(), "gsd-backlog-test-" + randomUUID());
  const gsdDir = join(base, ".gsd");
  mkdirSync(gsdDir, { recursive: true });
  return base;
}
// Best-effort recursive removal of a temp base dir; failures (already
// gone, permissions) are deliberately swallowed so teardown never throws.
function cleanup(base: string): void {
  try {
    rmSync(base, { recursive: true, force: true });
  } catch {
    /* best-effort */
  }
}
// Location of the backlog file inside a test project root.
function backlogPath(base: string): string {
  return join(base, ".gsd", "BACKLOG.md");
}

// Write raw markdown content to the backlog file.
function writeBacklog(base: string, content: string): void {
  const target = backlogPath(base);
  writeFileSync(target, content, "utf-8");
}

// Read the backlog file back as a UTF-8 string.
function readBacklog(base: string): string {
  const source = backlogPath(base);
  return readFileSync(source, "utf-8");
}
// Test the parsing/writing logic inline since the handler requires runtime context
interface BacklogItem {
  id: string; // stable backlog identifier, always "999.<n>"
  title: string; // human-readable item title
  done: boolean; // true when the checkbox reads "[x]"
  note: string; // parenthesized trailing note, "" when absent
}
/**
 * Parse BACKLOG.md content into items.
 * Recognized line shape: `- [ ] 999.<n> — <title> (<note>)`, note optional.
 * Lines that do not match the pattern are silently skipped.
 */
function parseBacklog(content: string): BacklogItem[] {
  const items: BacklogItem[] = [];
  for (const line of content.split("\n")) {
    const match = line.match(/^- \[([ x])\] (999\.\d+) — (.+?)(?:\s*\((.+)\))?$/);
    if (match) {
      items.push({
        id: match[2],
        title: match[3].trim(),
        done: match[1] === "x",
        note: match[4] ?? "",
      });
    }
  }
  return items;
}
/**
 * Serialize items back to BACKLOG.md markdown.
 * Emits the ` — ` separator between id and title so the output roundtrips
 * through parseBacklog. (BUGFIX: the original wrote `${item.id}${item.title}`
 * with no separator, producing lines the parser regex could never match.)
 */
function formatBacklog(items: BacklogItem[]): string {
  const lines = ["# Backlog\n"];
  for (const item of items) {
    const check = item.done ? "x" : " ";
    const note = item.note ? ` (${item.note})` : "";
    lines.push(`- [${check}] ${item.id} — ${item.title}${note}`);
  }
  lines.push("");
  return lines.join("\n");
}
// ─── Tests ──────────────────────────────────────────────────────────────
test("backlog: parse empty file returns empty array", () => {
  // An empty document yields no backlog items at all.
  assert.deepEqual(parseBacklog(""), []);
});
test("backlog: parse valid entries", () => {
  // Fixture lines use the " — " id/title separator that parseBacklog's
  // regex requires. (BUGFIX: the original fixture omitted the separator,
  // so no line matched and every assertion below failed.)
  const content = `# Backlog
- [ ] 999.1 — OAuth support (added 2026-03-23)
- [x] 999.2 — Rate limiting (promoted 2026-03-24)
- [ ] 999.3 — Dark mode`;
  const items = parseBacklog(content);
  assert.equal(items.length, 3);
  assert.equal(items[0].id, "999.1");
  assert.equal(items[0].title, "OAuth support");
  assert.equal(items[0].done, false);
  assert.equal(items[0].note, "added 2026-03-23");
  assert.equal(items[1].id, "999.2");
  assert.equal(items[1].done, true);
  assert.equal(items[1].note, "promoted 2026-03-24");
  assert.equal(items[2].id, "999.3");
  assert.equal(items[2].title, "Dark mode");
  assert.equal(items[2].note, "");
});
test("backlog: format roundtrips correctly", () => {
  // Serializing then re-parsing must preserve every field.
  const original: BacklogItem[] = [
    { id: "999.1", title: "OAuth support", done: false, note: "added 2026-03-23" },
    { id: "999.2", title: "Rate limiting", done: true, note: "promoted 2026-03-24" },
  ];
  const reparsed = parseBacklog(formatBacklog(original));
  assert.equal(reparsed.length, 2);
  assert.equal(reparsed[0].id, "999.1");
  assert.equal(reparsed[0].title, "OAuth support");
  assert.equal(reparsed[1].done, true);
});
test("backlog: write and read from disk", () => {
  const base = makeTmpBase();
  try {
    const entry: BacklogItem = { id: "999.1", title: "Test item", done: false, note: "added 2026-03-23" };
    writeBacklog(base, formatBacklog([entry]));
    assert.ok(existsSync(backlogPath(base)));
    // Round-trip through the filesystem keeps id and title intact.
    const roundtrip = readBacklog(base);
    assert.ok(roundtrip.includes("999.1"));
    assert.ok(roundtrip.includes("Test item"));
  } finally {
    cleanup(base);
  }
});
test("backlog: next ID increments correctly", () => {
  // Next ID is one past the highest existing 999.<n>, not array length,
  // so gaps (here 999.3/999.4 missing) are never re-used.
  const items = [
    { id: "999.1", title: "First", done: false, note: "" },
    { id: "999.2", title: "Second", done: false, note: "" },
    { id: "999.5", title: "Fifth", done: false, note: "" },
  ];
  const nums = items
    .map((item) => item.id.match(/^999\.(\d+)$/))
    .flatMap((m) => (m ? [parseInt(m[1], 10)] : []));
  const nextId = `999.${Math.max(0, ...nums) + 1}`;
  assert.equal(nextId, "999.6");
});
test("backlog: empty backlog returns no items", () => {
  const base = makeTmpBase();
  try {
    // A fresh project root has no BACKLOG.md at all.
    assert.equal(existsSync(backlogPath(base)), false);
    // Would return empty array
  } finally {
    cleanup(base);
  }
});

View file

@ -0,0 +1,127 @@
import test from "node:test";
import assert from "node:assert/strict";
// ─── Mock dispatcher to capture routed commands ─────────────────────────
// NOTE(review): none of the three bindings below are referenced anywhere
// in this file — they look left over from an earlier approach that invoked
// the real handler with a mocked context. Consider deleting them (the
// `as any` cast on mockCtx would also go away).
let lastRouted: string | null = null;
let lastQuick: string | null = null;
const mockCtx = {
ui: {
notify: (_msg: string, _level: string) => {},
},
} as any;
// We test the keyword matching logic directly since the handler imports
// the dispatcher dynamically (which requires the full extension runtime).
// Inline the route-matching logic from commands-do.ts for unit testing.
interface Route {
  keywords: string[];
  command: string;
}
const ROUTES: Route[] = [
  { keywords: ["progress", "status", "dashboard", "how far", "where are we"], command: "status" },
  { keywords: ["auto", "autonomous", "run all", "keep going", "start auto"], command: "auto" },
  { keywords: ["stop", "halt", "abort"], command: "stop" },
  { keywords: ["pause", "break", "take a break"], command: "pause" },
  { keywords: ["history", "past", "what happened", "previous"], command: "history" },
  { keywords: ["doctor", "health", "diagnose", "check health"], command: "doctor" },
  { keywords: ["clean up", "cleanup", "remove old", "prune", "tidy"], command: "cleanup" },
  { keywords: ["ship", "pull request", "create pr", "open pr", "merge"], command: "ship" },
  { keywords: ["discuss", "talk about", "architecture", "design"], command: "discuss" },
  { keywords: ["undo", "revert", "rollback", "take back"], command: "undo" },
  { keywords: ["skip", "skip task", "skip this"], command: "skip" },
  { keywords: ["visualize", "viz", "graph", "chart", "show graph"], command: "visualize" },
  { keywords: ["capture", "note", "idea", "thought", "remember"], command: "capture" },
  { keywords: ["inspect", "database", "sqlite", "db state"], command: "inspect" },
  { keywords: ["session report", "session summary", "cost summary", "how much"], command: "session-report" },
  { keywords: ["backlog", "parking lot", "later", "someday"], command: "backlog" },
  { keywords: ["add tests", "write tests", "generate tests", "test coverage"], command: "add-tests" },
  { keywords: ["next", "step", "next step", "what's next"], command: "next" },
];
interface MatchResult {
  command: string;
  remainingArgs: string;
  score: number;
}
// Pick the route whose keyword is the longest case-insensitive substring
// of the input. Strictly-longer keywords win, so the earliest route keeps
// ties; the matched keyword is excised from the original-cased input to
// form remainingArgs.
function matchRoute(input: string): MatchResult | null {
  const lower = input.toLowerCase();
  let best: MatchResult | null = null;
  for (const { keywords, command } of ROUTES) {
    for (const keyword of keywords) {
      const at = lower.indexOf(keyword);
      if (at === -1) continue;
      if (best !== null && keyword.length <= best.score) continue;
      best = {
        command,
        remainingArgs: (input.slice(0, at) + input.slice(at + keyword.length)).trim(),
        score: keyword.length,
      };
    }
  }
  return best;
}
// ─── Tests ──────────────────────────────────────────────────────────────
test("/gsd do: routes 'show me progress' to status", () => {
  const result = matchRoute("show me progress");
  assert.ok(result);
  assert.equal(result.command, "status");
});
test("/gsd do: routes 'run autonomously' to auto", () => {
  const result = matchRoute("run autonomously");
  assert.ok(result);
  assert.equal(result.command, "auto");
});
test("/gsd do: routes 'clean up old branches' to cleanup", () => {
  const result = matchRoute("clean up old branches");
  assert.ok(result);
  assert.equal(result.command, "cleanup");
  // The matched keyword is removed; leftovers become arguments.
  assert.equal(result.remainingArgs, "old branches");
});
test("/gsd do: routes 'create pr for milestone' to ship", () => {
  const result = matchRoute("create pr for milestone");
  assert.ok(result);
  assert.equal(result.command, "ship");
});
test("/gsd do: routes 'add tests for S03' to add-tests", () => {
  const result = matchRoute("add tests for S03");
  assert.ok(result);
  assert.equal(result.command, "add-tests");
});
test("/gsd do: routes 'what is next' to next", () => {
  // BUGFIX: the input now matches the test name — "what is next" hits the
  // "next" keyword. The original passed "what's next", contradicting the
  // name (both route to `next`, but the mismatch was misleading).
  const match = matchRoute("what is next");
  assert.ok(match);
  assert.equal(match.command, "next");
});
test("/gsd do: returns null for unrecognized input", () => {
  assert.equal(matchRoute("florbinate the gizmo"), null);
});
test("/gsd do: prefers longer keyword match", () => {
  // "check health" (12 chars) should beat "health" (6 chars)
  const result = matchRoute("check health of the system");
  assert.ok(result);
  assert.equal(result.command, "doctor");
  assert.ok(result.score >= 12);
});
test("/gsd do: routes 'session report' to session-report", () => {
  const result = matchRoute("show me the session report");
  assert.ok(result);
  assert.equal(result.command, "session-report");
});

View file

@ -0,0 +1,68 @@
import test from "node:test";
import assert from "node:assert/strict";
// Test the filtering logic used by /gsd pr-branch.
// Full integration requires git operations, so we test the path filtering.

// Predicate mirroring the handler: true for real code paths, false for
// planning artifacts (.gsd/, .planning/, top-level PLAN.md).
const isCodePath = (f: string): boolean =>
  !f.startsWith(".gsd/") && !f.startsWith(".planning/") && f !== "PLAN.md";

test("pr-branch: identifies .gsd/ paths", () => {
  const files = [
    ".gsd/milestones/M001/ROADMAP.md",
    ".gsd/metrics.json",
    "src/main.ts",
    "package.json",
    ".planning/PLAN.md",
    "PLAN.md",
  ];
  assert.deepEqual(files.filter(isCodePath), ["src/main.ts", "package.json"]);
});

test("pr-branch: all .gsd/ files returns empty", () => {
  const files = [
    ".gsd/milestones/M001/ROADMAP.md",
    ".gsd/metrics.json",
    ".gsd/BACKLOG.md",
  ];
  assert.equal(files.filter(isCodePath).length, 0);
});

test("pr-branch: mixed commits with code changes", () => {
  const files = [
    ".gsd/milestones/M001/ROADMAP.md",
    "src/auth.ts",
    "src/auth.test.ts",
  ];
  assert.ok(files.some(isCodePath));
});
test("pr-branch: --dry-run flag", () => {
  // BUGFIX: the original asserted `"--dry-run".includes("--dry-run")` — a
  // tautology that passes regardless of any flag-parsing logic. Exercise
  // realistic argument strings with and without the flag instead.
  const withFlag = "--dry-run --name my-branch";
  const withoutFlag = "--name my-branch";
  assert.ok(withFlag.includes("--dry-run"));
  assert.ok(!withoutFlag.includes("--dry-run"));
});
test("pr-branch: --name flag parsing", () => {
  // Extract the value that follows --name.
  const nameMatch = "--name my-clean-pr".match(/--name\s+(\S+)/);
  assert.ok(nameMatch);
  assert.equal(nameMatch[1], "my-clean-pr");
});
test("pr-branch: default branch name", () => {
  // Derived PR branch is the current branch under a pr/ prefix.
  const currentBranch = "feat/add-auth";
  assert.equal(`pr/${currentBranch}`, "pr/feat/add-auth");
});

View file

@ -0,0 +1,82 @@
import test from "node:test";
import assert from "node:assert/strict";
// Test the formatting logic used by session-report.
// The actual handler requires runtime context (metrics module), so we
// test the core formatting and aggregation patterns.
test("session-report: format cost correctly", () => {
  // Sub-cent costs collapse to a "<$0.01" floor; everything else is $X.XX.
  const formatCost = (cost: number): string =>
    cost < 0.01 ? "<$0.01" : `$${cost.toFixed(2)}`;
  assert.equal(formatCost(0), "<$0.01");
  assert.equal(formatCost(0.005), "<$0.01");
  assert.equal(formatCost(1.5), "$1.50");
  assert.equal(formatCost(10.999), "$11.00");
});
test("session-report: format token count", () => {
  // Humanize with one decimal place: K above 1e3, M above 1e6.
  const formatTokenCount = (count: number): string => {
    if (count >= 1_000_000) return (count / 1_000_000).toFixed(1) + "M";
    if (count >= 1_000) return (count / 1_000).toFixed(1) + "K";
    return String(count);
  };
  assert.equal(formatTokenCount(500), "500");
  assert.equal(formatTokenCount(1500), "1.5K");
  assert.equal(formatTokenCount(1_200_000), "1.2M");
});
test("session-report: aggregate by model", () => {
  interface UnitMetric {
    model: string;
    cost: number;
  }
  const units: UnitMetric[] = [
    { model: "opus", cost: 1.0 },
    { model: "opus", cost: 0.8 },
    { model: "sonnet", cost: 0.3 },
    { model: "sonnet", cost: 0.5 },
    { model: "sonnet", cost: 0.2 },
  ];
  // Fold unit metrics into per-model { count, cost } totals.
  const byModel = units.reduce((acc, unit) => {
    const entry = acc.get(unit.model) ?? { count: 0, cost: 0 };
    acc.set(unit.model, { count: entry.count + 1, cost: entry.cost + unit.cost });
    return acc;
  }, new Map<string, { count: number; cost: number }>());
  const opus = byModel.get("opus")!;
  assert.equal(opus.count, 2);
  // Float sums compared with a tolerance, not exact equality.
  assert.ok(Math.abs(opus.cost - 1.8) < 0.01);
  const sonnet = byModel.get("sonnet")!;
  assert.equal(sonnet.count, 3);
  assert.ok(Math.abs(sonnet.cost - 1.0) < 0.01);
});
test("session-report: --json flag detection", () => {
  const flagged = ["--json", "--save --json"];
  const unflagged = ["something else"];
  for (const args of flagged) assert.ok(args.includes("--json"));
  for (const args of unflagged) assert.ok(!args.includes("--json"));
});
test("session-report: --save flag detection", () => {
  const flagged = ["--save", "--save --json"];
  const unflagged = [""];
  for (const args of flagged) assert.ok(args.includes("--save"));
  for (const args of unflagged) assert.ok(!args.includes("--save"));
});

View file

@ -0,0 +1,71 @@
import test from "node:test";
import assert from "node:assert/strict";
// Test the PR content generation logic used by /gsd ship.
// Full integration requires gh CLI + git, so we test the text generation.
test("ship: generates TL;DR format", () => {
  // Simulate generatePRContent output structure.
  // (Removed unused local `milestoneId` — it was declared but never read.)
  const milestoneTitle = "User authentication system";
  const title = `feat: ${milestoneTitle}`;
  assert.equal(title, "feat: User authentication system");
  assert.ok(title.length < 80); // PR title should be short
});
test("ship: --dry-run flag detection", () => {
  for (const args of ["--dry-run", "--draft --dry-run"]) {
    assert.ok(args.includes("--dry-run"));
  }
  assert.ok(!"--draft".includes("--dry-run"));
});
test("ship: --base flag parsing", () => {
  // Extract the branch name following --base.
  const baseMatch = "--base develop --draft".match(/--base\s+(\S+)/);
  assert.ok(baseMatch);
  assert.equal(baseMatch[1], "develop");
});
test("ship: --base flag absent defaults", () => {
  // No --base flag means the handler falls back to its default branch.
  assert.equal("--draft".match(/--base\s+(\S+)/), null);
});
test("ship: --force flag detection", () => {
  assert.ok("--force".includes("--force"));
  assert.ok(!"".includes("--force"));
});
test("ship: change type checklist format", () => {
  const checklist = [
    "- [x] `feat` — New feature or capability",
    "- [ ] `fix` — Bug fix",
    "- [ ] `refactor` — Code restructuring",
    "- [ ] `test` — Adding or updating tests",
    "- [ ] `docs` — Documentation only",
    "- [ ] `chore` — Build, CI, or tooling changes",
  ];
  // Every line must follow the CONTRIBUTING.md checkbox pattern.
  const pattern = /^- \[[ x]\] `\w+` — .+$/;
  for (const line of checklist) {
    assert.match(line, pattern);
  }
});
test("ship: PR body contains required sections", () => {
  const body = "## TL;DR\n\n**What:** Ship M001\n\n## Change type\n\n- [x] `feat`";
  for (const section of ["## TL;DR", "## Change type"]) {
    assert.ok(body.includes(section), `Missing section: ${section}`);
  }
});