feat: /gsd migrate — .planning to .gsd migration tool
Add `/gsd migrate [path]` command that reads old get-shit-done .planning directories and writes complete .gsd directory trees for GSD-2. Pipeline: validate → parse → transform → preview → confirm → write → review Parser (S01): - 7 per-file parsers: roadmap, plan, summary, requirements, project, state, config - Handles flat, milestone-sectioned, and <details>-block roadmap formats - Bold phase entries, "Phase N:" format, decimal numbering, duplicate phase numbers - Bullet-format requirements (- [x] **ID**: Description) - Graceful null returns for missing files, severity-classified validation Transformer (S02): - Phases → slices, plans → tasks, milestones → milestones - Float-sorted decimal phases renumbered sequentially (S01, S02, ...) - Completion state preserved (roadmap [x] → slice done, summary → task done) - Research consolidated with fixed file-type ordering - Requirements classified with complete/done/shipped → validated normalization - Vision derived from PROJECT.md with three-level fallback - Duplicate phase numbers disambiguated by title similarity Writer (S03): - Format functions for all GSD-2 file types with round-trip verification - writeGSDDirectory produces tree that deriveState() reads correctly - generatePreview computes milestone/slice/task counts + completion % - Null research and empty requirements silently skipped Command (S04): - Default to cwd when no args given; ~/path expansion - Validation gating (fatal issues block pipeline) - Preview with showNextAction confirmation - Post-write agent review via prompts/review-migration.md template - Wired into commands.ts with tab completion Also: - .gitignore: replace granular .gsd/* entries with .gsd/ catch-all - README: add /gsd migrate to commands table + "Migrating from v1" section - files.ts: widen parseRequirementCounts regex for non-R prefixed IDs 478 assertions across 6 test suites, all passing. UAT against blade/bladeai (28 phases, 8 milestones) and aire (10 phases, 2 milestones).
This commit is contained in:
parent
0ec7a36cd6
commit
e136844353
20 changed files with 5681 additions and 17 deletions
9
.gitignore
vendored
9
.gitignore
vendored
|
|
@ -1,10 +1,6 @@
|
|||
|
||||
# ── GSD baseline (auto-generated) ──
|
||||
.gsd/activity/
|
||||
.gsd/runtime/
|
||||
.gsd/auto.lock
|
||||
.gsd/metrics.json
|
||||
.gsd/STATE.md
|
||||
# ── GSD (user project artifacts — never commit) ──
|
||||
.gsd/
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
*.swp
|
||||
|
|
@ -36,6 +32,5 @@ tmp/
|
|||
dist/
|
||||
.bg_shell
|
||||
.gsd*.tgz
|
||||
.gsd
|
||||
.artifacts/
|
||||
AGENTS.md
|
||||
23
README.md
23
README.md
|
|
@ -46,6 +46,28 @@ GSD v2 solves all of these because it's not a prompt framework anymore — it's
|
|||
| Roadmap reassessment | Manual | Automatic after each slice completes |
|
||||
| Skill discovery | None | Auto-detect and install relevant skills during research |
|
||||
|
||||
### Migrating from v1
|
||||
|
||||
If you have projects with `.planning` directories from the original Get Shit Done, you can migrate them to GSD-2's `.gsd` format:
|
||||
|
||||
```bash
|
||||
# From within the project directory
|
||||
/gsd migrate
|
||||
|
||||
# Or specify a path
|
||||
/gsd migrate ~/projects/my-old-project
|
||||
```
|
||||
|
||||
The migration tool:
|
||||
- Parses your old `PROJECT.md`, `ROADMAP.md`, `REQUIREMENTS.md`, phase directories, plans, summaries, and research
|
||||
- Maps phases → slices, plans → tasks, milestones → milestones
|
||||
- Preserves completion state (`[x]` phases stay done, summaries carry over)
|
||||
- Consolidates research files into the new structure
|
||||
- Shows a preview before writing anything
|
||||
- Optionally runs an agent-driven review of the output for quality assurance
|
||||
|
||||
Supports format variations including milestone-sectioned roadmaps with `<details>` blocks, bold phase entries, bullet-format requirements, decimal phase numbering, and duplicate phase numbers across milestones.
|
||||
|
||||
---
|
||||
|
||||
## How It Works
|
||||
|
|
@ -187,6 +209,7 @@ On first run, GSD prompts for optional API keys (Brave Search, Context7, Jina) f
|
|||
| `/gsd status` | Progress dashboard |
|
||||
| `/gsd queue` | Queue future milestones (safe during auto mode) |
|
||||
| `/gsd prefs` | Model selection, timeouts, budget ceiling |
|
||||
| `/gsd migrate` | Migrate a v1 `.planning` directory to `.gsd` format |
|
||||
| `/gsd doctor` | Validate `.gsd/` integrity, find and fix issues |
|
||||
| `Ctrl+Alt+G` | Toggle dashboard overlay |
|
||||
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ import {
|
|||
filterDoctorIssues,
|
||||
} from "./doctor.js";
|
||||
import { loadPrompt } from "./prompt-loader.js";
|
||||
import { handleMigrate } from "./migrate/command.js";
|
||||
|
||||
function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined, reportText: string, structuredIssues: string): void {
|
||||
const workflowPath = process.env.GSD_WORKFLOW_PATH ?? join(process.env.HOME ?? "~", ".pi", "GSD-WORKFLOW.md");
|
||||
|
|
@ -51,10 +52,10 @@ function dispatchDoctorHeal(pi: ExtensionAPI, scope: string | undefined, reportT
|
|||
|
||||
export function registerGSDCommand(pi: ExtensionAPI): void {
|
||||
pi.registerCommand("gsd", {
|
||||
description: "GSD — Get Stuff Done: /gsd auto|stop|status|queue|prefs|doctor",
|
||||
description: "GSD — Get Stuff Done: /gsd auto|stop|status|queue|prefs|doctor|migrate",
|
||||
|
||||
getArgumentCompletions: (prefix: string) => {
|
||||
const subcommands = ["auto", "stop", "status", "queue", "discuss", "prefs", "doctor"];
|
||||
const subcommands = ["auto", "stop", "status", "queue", "discuss", "prefs", "doctor", "migrate"];
|
||||
const parts = prefix.trim().split(/\s+/);
|
||||
|
||||
if (parts.length <= 1) {
|
||||
|
|
@ -136,13 +137,18 @@ export function registerGSDCommand(pi: ExtensionAPI): void {
|
|||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "migrate" || trimmed.startsWith("migrate ")) {
|
||||
await handleMigrate(trimmed.replace(/^migrate\s*/, "").trim(), ctx, pi);
|
||||
return;
|
||||
}
|
||||
|
||||
if (trimmed === "") {
|
||||
await showSmartEntry(ctx, pi, process.cwd());
|
||||
return;
|
||||
}
|
||||
|
||||
ctx.ui.notify(
|
||||
`Unknown: /gsd ${trimmed}. Use /gsd, /gsd auto, /gsd stop, /gsd status, /gsd queue, /gsd discuss, /gsd prefs [global|project|status], or /gsd doctor [audit|fix|heal] [M###/S##].`,
|
||||
`Unknown: /gsd ${trimmed}. Use /gsd, /gsd auto, /gsd stop, /gsd status, /gsd queue, /gsd discuss, /gsd prefs [global|project|status], /gsd doctor [audit|fix|heal] [M###/S##], or /gsd migrate <path>.`,
|
||||
"warning",
|
||||
);
|
||||
},
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ import type {
|
|||
* Split markdown content into frontmatter (YAML-like) and body.
|
||||
* Returns [frontmatterLines, body] where frontmatterLines is null if no frontmatter.
|
||||
*/
|
||||
function splitFrontmatter(content: string): [string[] | null, string] {
|
||||
export function splitFrontmatter(content: string): [string[] | null, string] {
|
||||
const trimmed = content.trimStart();
|
||||
if (!trimmed.startsWith('---')) return [null, content];
|
||||
|
||||
|
|
@ -42,7 +42,7 @@ function splitFrontmatter(content: string): [string[] | null, string] {
|
|||
* Handles simple scalars and arrays (lines starting with " - ").
|
||||
* Handles nested objects like requires (lines with " key: value").
|
||||
*/
|
||||
function parseFrontmatterMap(lines: string[]): Record<string, unknown> {
|
||||
export function parseFrontmatterMap(lines: string[]): Record<string, unknown> {
|
||||
const result: Record<string, unknown> = {};
|
||||
let currentKey: string | null = null;
|
||||
let currentArray: unknown[] | null = null;
|
||||
|
|
@ -124,7 +124,7 @@ function parseFrontmatterMap(lines: string[]): Record<string, unknown> {
|
|||
}
|
||||
|
||||
/** Extract the text after a heading at a given level, up to the next heading of same or higher level. */
|
||||
function extractSection(body: string, heading: string, level: number = 2): string | null {
|
||||
export function extractSection(body: string, heading: string, level: number = 2): string | null {
|
||||
const prefix = '#'.repeat(level) + ' ';
|
||||
const regex = new RegExp(`^${prefix}${escapeRegex(heading)}\\s*$`, 'm');
|
||||
const match = regex.exec(body);
|
||||
|
|
@ -140,7 +140,7 @@ function extractSection(body: string, heading: string, level: number = 2): strin
|
|||
}
|
||||
|
||||
/** Extract all sections at a given level, returning heading → content map. */
|
||||
function extractAllSections(body: string, level: number = 2): Map<string, string> {
|
||||
export function extractAllSections(body: string, level: number = 2): Map<string, string> {
|
||||
const prefix = '#'.repeat(level) + ' ';
|
||||
const regex = new RegExp(`^${prefix}(.+)$`, 'gm');
|
||||
const sections = new Map<string, string>();
|
||||
|
|
@ -161,14 +161,14 @@ function escapeRegex(s: string): string {
|
|||
}
|
||||
|
||||
/** Parse bullet list items from a text block. */
|
||||
function parseBullets(text: string): string[] {
|
||||
export function parseBullets(text: string): string[] {
|
||||
return text.split('\n')
|
||||
.map(l => l.replace(/^\s*[-*]\s+/, '').trim())
|
||||
.filter(l => l.length > 0 && !l.startsWith('#'));
|
||||
}
|
||||
|
||||
/** Extract key: value from bold-prefixed lines like "**Key:** Value" */
|
||||
function extractBoldField(text: string, key: string): string | null {
|
||||
export function extractBoldField(text: string, key: string): string | null {
|
||||
const regex = new RegExp(`^\\*\\*${escapeRegex(key)}:\\*\\*\\s*(.+)$`, 'm');
|
||||
const match = regex.exec(text);
|
||||
return match ? match[1].trim() : null;
|
||||
|
|
@ -548,7 +548,7 @@ export function parseRequirementCounts(content: string | null): RequirementCount
|
|||
for (const section of sections) {
|
||||
const text = extractSection(content, section.heading, 2);
|
||||
if (!text) continue;
|
||||
const matches = text.match(/^###\s+R\d+\s+—/gm);
|
||||
const matches = text.match(/^###\s+[A-Z][\w-]*\d+\s+—/gm);
|
||||
counts[section.key] = matches ? matches.length : 0;
|
||||
}
|
||||
|
||||
|
|
|
|||
215
src/resources/extensions/gsd/migrate/command.ts
Normal file
215
src/resources/extensions/gsd/migrate/command.ts
Normal file
|
|
@ -0,0 +1,215 @@
|
|||
/**
|
||||
* /gsd migrate — one-shot migration from .planning to .gsd
|
||||
*
|
||||
* Thin UX orchestrator: resolves paths, runs the validate → parse → transform →
|
||||
* preview → write pipeline, and shows confirmation UI via showNextAction.
|
||||
* All business logic lives in the pipeline modules (S01–S03).
|
||||
*
|
||||
* After a successful write, offers an agent-driven review that audits the
|
||||
* output for GSD-2 standards compliance.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI, ExtensionCommandContext } from "@mariozechner/pi-coding-agent";
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { resolve, join, dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { showNextAction } from "../../shared/next-action-ui.js";
|
||||
import {
|
||||
validatePlanningDirectory,
|
||||
parsePlanningDirectory,
|
||||
transformToGSD,
|
||||
generatePreview,
|
||||
writeGSDDirectory,
|
||||
} from "./index.js";
|
||||
|
||||
import type { MigrationPreview } from "./writer.js";
|
||||
|
||||
/** Format preview stats for embedding in the review prompt. */
|
||||
function formatPreviewStats(preview: MigrationPreview): string {
|
||||
const lines = [
|
||||
`- Milestones: ${preview.milestoneCount}`,
|
||||
`- Slices: ${preview.totalSlices} (${preview.doneSlices} done — ${preview.sliceCompletionPct}%)`,
|
||||
`- Tasks: ${preview.totalTasks} (${preview.doneTasks} done — ${preview.taskCompletionPct}%)`,
|
||||
];
|
||||
if (preview.requirements.total > 0) {
|
||||
lines.push(
|
||||
`- Requirements: ${preview.requirements.total} (${preview.requirements.validated} validated, ${preview.requirements.active} active, ${preview.requirements.deferred} deferred)`,
|
||||
);
|
||||
}
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
/** Load and interpolate the review-migration prompt template. */
|
||||
function buildReviewPrompt(
|
||||
sourcePath: string,
|
||||
gsdPath: string,
|
||||
preview: MigrationPreview,
|
||||
): string {
|
||||
const promptsDir = join(dirname(fileURLToPath(import.meta.url)), "..", "prompts");
|
||||
const templatePath = join(promptsDir, "review-migration.md");
|
||||
let content = readFileSync(templatePath, "utf-8");
|
||||
|
||||
content = content.replaceAll("{{sourcePath}}", sourcePath);
|
||||
content = content.replaceAll("{{gsdPath}}", gsdPath);
|
||||
content = content.replaceAll("{{previewStats}}", formatPreviewStats(preview));
|
||||
|
||||
return content.trim();
|
||||
}
|
||||
|
||||
/** Dispatch the review prompt to the agent. */
|
||||
function dispatchReview(
|
||||
pi: ExtensionAPI,
|
||||
sourcePath: string,
|
||||
gsdPath: string,
|
||||
preview: MigrationPreview,
|
||||
): void {
|
||||
const prompt = buildReviewPrompt(sourcePath, gsdPath, preview);
|
||||
|
||||
pi.sendMessage(
|
||||
{
|
||||
customType: "gsd-migrate-review",
|
||||
content: prompt,
|
||||
display: false,
|
||||
},
|
||||
{ triggerTurn: true },
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Entry point for `/gsd migrate [path]`.
 *
 * Pipeline: resolve source path → validate → parse → transform → preview →
 * user confirmation → write → optional agent review.
 *
 * @param args Raw argument string after "migrate" (may be empty → cwd).
 * @param ctx  Command context; used only for ctx.ui.notify and showNextAction.
 * @param pi   Extension API; used only to dispatch the review message.
 *
 * NOTE(review): the .gsd output is always written to process.cwd(), even when
 * `args` points at a different project directory — confirm this is intended.
 */
export async function handleMigrate(
  args: string,
  ctx: ExtensionCommandContext,
  pi: ExtensionAPI,
): Promise<void> {
  // ── Resolve source path ────────────────────────────────────────────────────
  // Default to cwd when no args given; expand ~ to HOME
  let rawPath = args.trim() || ".";
  if (rawPath.startsWith("~/")) {
    rawPath = join(process.env.HOME ?? "~", rawPath.slice(2));
  } else if (rawPath === "~") {
    rawPath = process.env.HOME ?? "~";
  }

  // Accept either the project root or the .planning directory itself.
  let sourcePath = resolve(process.cwd(), rawPath);
  if (!sourcePath.endsWith(".planning")) {
    sourcePath = join(sourcePath, ".planning");
  }

  if (!existsSync(sourcePath)) {
    ctx.ui.notify(
      `Directory not found: ${sourcePath}\n\nMake sure the path points to a project root with a .planning directory.`,
      "error",
    );
    return;
  }

  // ── Validate ───────────────────────────────────────────────────────────────
  const validation = await validatePlanningDirectory(sourcePath);

  const warnings = validation.issues.filter((i) => i.severity === "warning");
  const fatals = validation.issues.filter((i) => i.severity === "fatal");

  // Surface every issue before gating; warnings do not block the pipeline.
  for (const w of warnings) {
    ctx.ui.notify(`⚠ ${w.message} (${w.file})`, "warning");
  }
  for (const f of fatals) {
    ctx.ui.notify(`✖ ${f.message} (${f.file})`, "error");
  }

  if (!validation.valid) {
    ctx.ui.notify(
      "Migration blocked — fix the fatal issues above before retrying.",
      "error",
    );
    return;
  }

  // ── Parse → Transform → Preview ───────────────────────────────────────────
  const parsed = await parsePlanningDirectory(sourcePath);
  const project = transformToGSD(parsed);
  const preview = generatePreview(project);

  // ── Build preview text ─────────────────────────────────────────────────────
  const lines: string[] = [
    `Milestones: ${preview.milestoneCount}`,
    `Slices: ${preview.totalSlices} (${preview.doneSlices} done — ${preview.sliceCompletionPct}%)`,
    `Tasks: ${preview.totalTasks} (${preview.doneTasks} done — ${preview.taskCompletionPct}%)`,
  ];

  if (preview.requirements.total > 0) {
    lines.push(
      `Requirements: ${preview.requirements.total} (${preview.requirements.validated} validated, ${preview.requirements.active} active, ${preview.requirements.deferred} deferred)`,
    );
  }

  // Warn when the write target already exists — writeGSDDirectory overwrites.
  const targetGsdExists = existsSync(join(process.cwd(), ".gsd"));
  if (targetGsdExists) {
    lines.push("");
    lines.push("⚠ A .gsd directory already exists in the current working directory — it will be overwritten.");
  }

  // ── Confirmation via showNextAction ────────────────────────────────────────
  const choice = await showNextAction(ctx as any, {
    title: "Migration preview",
    summary: lines,
    actions: [
      {
        id: "confirm",
        label: "Write .gsd directory",
        description: `Migrate ${preview.milestoneCount} milestone(s) to ${process.cwd()}/.gsd`,
        recommended: true,
      },
      {
        id: "cancel",
        label: "Cancel",
        description: "Exit without writing anything",
      },
    ],
    notYetMessage: "Run /gsd migrate again when ready.",
  });

  // Anything other than an explicit confirm (cancel, dismiss) aborts cleanly.
  if (choice !== "confirm") {
    ctx.ui.notify("Migration cancelled — no files were written.", "info");
    return;
  }

  // ── Write ──────────────────────────────────────────────────────────────────
  ctx.ui.notify("Writing .gsd directory…", "info");

  const result = await writeGSDDirectory(project, process.cwd());
  const gsdPath = join(process.cwd(), ".gsd");

  ctx.ui.notify(
    `✓ Migration complete — ${result.paths.length} file(s) written to .gsd/`,
    "info",
  );

  // ── Post-write review offer ────────────────────────────────────────────────
  const reviewChoice = await showNextAction(ctx as any, {
    title: "Migration written",
    summary: [
      `${result.paths.length} files written to .gsd/`,
      "",
      "The agent can now review the migrated output against GSD-2 standards —",
      "checking structure, content quality, deriveState() round-trip, and",
      "requirement statuses. It will fix minor issues in-place.",
    ],
    actions: [
      {
        id: "review",
        label: "Review migration",
        description: "Agent audits the .gsd output and reports PASS/FAIL per category",
        recommended: true,
      },
      {
        id: "skip",
        label: "Skip review",
        description: "Trust the migration output as-is",
      },
    ],
    notYetMessage: "Run /gsd migrate again to re-migrate, or review .gsd manually.",
  });

  if (reviewChoice === "review") {
    dispatchReview(pi, sourcePath, gsdPath, preview);
  }
}
|
||||
42
src/resources/extensions/gsd/migrate/index.ts
Normal file
42
src/resources/extensions/gsd/migrate/index.ts
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
// Barrel export for old .planning migration module
//
// NOTE(review): these specifiers use explicit `.ts` extensions while sibling
// modules (e.g. commands.ts importing "./migrate/command.js") use `.js` —
// confirm the build config (allowImportingTsExtensions / bundler resolution)
// supports both styles.

export { handleMigrate } from './command.ts';
export { parsePlanningDirectory } from './parser.ts';
export { validatePlanningDirectory } from './validator.ts';
export { transformToGSD } from './transformer.ts';
export { writeGSDDirectory } from './writer.ts';
export type { WrittenFiles, MigrationPreview } from './writer.ts';
export { generatePreview } from './preview.ts';
export type {
  // Input types (old .planning format)
  PlanningProject,
  PlanningPhase,
  PlanningPlan,
  PlanningPlanFrontmatter,
  PlanningPlanMustHaves,
  PlanningSummary,
  PlanningSummaryFrontmatter,
  PlanningSummaryRequires,
  PlanningRoadmap,
  PlanningRoadmapMilestone,
  PlanningRoadmapEntry,
  PlanningRequirement,
  PlanningResearch,
  PlanningConfig,
  PlanningQuickTask,
  PlanningMilestone,
  PlanningState,
  PlanningPhaseFile,
  ValidationResult,
  ValidationIssue,
  ValidationSeverity,
  // Output types (GSD-2 format)
  GSDProject,
  GSDMilestone,
  GSDSlice,
  GSDTask,
  GSDRequirement,
  GSDSliceSummaryData,
  GSDTaskSummaryData,
  GSDBoundaryEntry,
} from './types.ts';
|
||||
323
src/resources/extensions/gsd/migrate/parser.ts
Normal file
323
src/resources/extensions/gsd/migrate/parser.ts
Normal file
|
|
@ -0,0 +1,323 @@
|
|||
// Old .planning directory parser orchestrator
|
||||
// Walks a .planning directory tree, delegates to per-file parsers,
|
||||
// and assembles the complete typed PlanningProject.
|
||||
// Zero Pi dependencies — uses only Node built-ins + local parsers.
|
||||
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from 'node:fs';
|
||||
import { join, basename } from 'node:path';
|
||||
|
||||
import {
|
||||
parseOldRoadmap,
|
||||
parseOldPlan,
|
||||
parseOldSummary,
|
||||
parseOldRequirements,
|
||||
parseOldProject,
|
||||
parseOldState,
|
||||
parseOldConfig,
|
||||
} from './parsers.ts';
|
||||
import { validatePlanningDirectory } from './validator.ts';
|
||||
|
||||
import type {
|
||||
PlanningProject,
|
||||
PlanningPhase,
|
||||
PlanningQuickTask,
|
||||
PlanningMilestone,
|
||||
PlanningResearch,
|
||||
PlanningPhaseFile,
|
||||
} from './types.ts';
|
||||
|
||||
// ─── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
/** Read a file, returning null if it doesn't exist. */
|
||||
function readOptional(path: string): string | null {
|
||||
try {
|
||||
return readFileSync(path, 'utf-8');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** List directory entries (names only), returning [] if dir doesn't exist. */
|
||||
function listDir(path: string): string[] {
|
||||
try {
|
||||
return readdirSync(path);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/** Check if a path is a directory. */
|
||||
function isDir(path: string): boolean {
|
||||
try {
|
||||
return statSync(path).isDirectory();
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** Extract phase number and slug from a directory name like "29-auth-system" or "01.2-setup". */
|
||||
function parsePhaseDir(dirName: string): { number: number; slug: string } | null {
|
||||
const match = dirName.match(/^(\d+(?:\.\d+)?)-(.+)$/);
|
||||
if (!match) return null;
|
||||
return { number: parseFloat(match[1]), slug: match[2] };
|
||||
}
|
||||
|
||||
/** Extract quick task number and slug from a directory name like "001-fix-login". */
|
||||
function parseQuickDir(dirName: string): { number: number; slug: string } | null {
|
||||
const match = dirName.match(/^(\d+)-(.+)$/);
|
||||
if (!match) return null;
|
||||
return { number: parseInt(match[1], 10), slug: match[2] };
|
||||
}
|
||||
|
||||
// ─── Phase Scanner ─────────────────────────────────────────────────────────
|
||||
|
||||
/** Plan file pattern: NN-NN-PLAN.md (e.g. 29-01-PLAN.md); group 2 is the plan number. */
const PLAN_RE = /^(\d+(?:\.\d+)?)-(\d+)-PLAN\.md$/i;

/** Summary file pattern: NN-NN-SUMMARY.md (e.g. 29-01-SUMMARY.md); group 2 is the plan number. */
const SUMMARY_RE = /^(\d+(?:\.\d+)?)-(\d+)-SUMMARY\.md$/i;

/** Research file pattern: any filename containing RESEARCH (case-insensitive) — deliberately broad. */
const RESEARCH_RE = /research/i;

/**
 * Verification file pattern: contains VERIFICATION (case-insensitive).
 * scanPhaseDirectory tests this before RESEARCH_RE, so a name matching both
 * is classified as a verification file.
 */
const VERIFICATION_RE = /verification/i;
|
||||
|
||||
function scanPhaseDirectory(phaseDir: string, dirName: string, parsed: ReturnType<typeof parsePhaseDir>): PlanningPhase {
|
||||
const phase: PlanningPhase = {
|
||||
dirName,
|
||||
number: parsed!.number,
|
||||
slug: parsed!.slug,
|
||||
plans: {},
|
||||
summaries: {},
|
||||
research: [],
|
||||
verifications: [],
|
||||
extraFiles: [],
|
||||
};
|
||||
|
||||
const entries = listDir(phaseDir);
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryPath = join(phaseDir, entry);
|
||||
|
||||
// Skip directories within phase dirs
|
||||
if (isDir(entryPath)) continue;
|
||||
|
||||
const planMatch = entry.match(PLAN_RE);
|
||||
if (planMatch) {
|
||||
const planNumber = planMatch[2];
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
phase.plans[planNumber] = parseOldPlan(content, entry, planNumber);
|
||||
continue;
|
||||
}
|
||||
|
||||
const summaryMatch = entry.match(SUMMARY_RE);
|
||||
if (summaryMatch) {
|
||||
const planNumber = summaryMatch[2];
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
phase.summaries[planNumber] = parseOldSummary(content, entry, planNumber);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (VERIFICATION_RE.test(entry)) {
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
phase.verifications.push({ fileName: entry, content });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (RESEARCH_RE.test(entry)) {
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
phase.research.push({ fileName: entry, content });
|
||||
continue;
|
||||
}
|
||||
|
||||
// Everything else is an extra file
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
phase.extraFiles.push({ fileName: entry, content });
|
||||
}
|
||||
|
||||
return phase;
|
||||
}
|
||||
|
||||
// ─── Quick Task Scanner ────────────────────────────────────────────────────
|
||||
|
||||
function scanQuickDirectory(quickDir: string): PlanningQuickTask[] {
|
||||
const tasks: PlanningQuickTask[] = [];
|
||||
const entries = listDir(quickDir).sort();
|
||||
|
||||
for (const dirName of entries) {
|
||||
const dirPath = join(quickDir, dirName);
|
||||
if (!isDir(dirPath)) continue;
|
||||
|
||||
const parsed = parseQuickDir(dirName);
|
||||
if (!parsed) continue;
|
||||
|
||||
// Look for NNN-PLAN.md and NNN-SUMMARY.md
|
||||
const files = listDir(dirPath);
|
||||
let plan: string | null = null;
|
||||
let summary: string | null = null;
|
||||
|
||||
for (const file of files) {
|
||||
if (/^\d+-PLAN\.md$/i.test(file)) {
|
||||
plan = readFileSync(join(dirPath, file), 'utf-8');
|
||||
} else if (/^\d+-SUMMARY\.md$/i.test(file)) {
|
||||
summary = readFileSync(join(dirPath, file), 'utf-8');
|
||||
}
|
||||
}
|
||||
|
||||
tasks.push({
|
||||
dirName,
|
||||
number: parsed.number,
|
||||
slug: parsed.slug,
|
||||
plan,
|
||||
summary,
|
||||
});
|
||||
}
|
||||
|
||||
return tasks;
|
||||
}
|
||||
|
||||
// ─── Milestones Scanner ────────────────────────────────────────────────────
|
||||
|
||||
function scanMilestonesDirectory(msDir: string): PlanningMilestone[] {
|
||||
const entries = listDir(msDir);
|
||||
if (entries.length === 0) return [];
|
||||
|
||||
// Group files by milestone ID prefix (e.g. "v2.2" from "v2.2-ROADMAP.md")
|
||||
const grouped = new Map<string, { requirements: string | null; roadmap: string | null; extraFiles: PlanningPhaseFile[] }>();
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryPath = join(msDir, entry);
|
||||
if (isDir(entryPath)) continue;
|
||||
|
||||
// Extract milestone ID: everything before the first dash-followed-by-uppercase or common suffix
|
||||
const idMatch = entry.match(/^(.+?)-(ROADMAP|REQUIREMENTS|SUMMARY)\.md$/i);
|
||||
if (idMatch) {
|
||||
const id = idMatch[1];
|
||||
const type = idMatch[2].toUpperCase();
|
||||
if (!grouped.has(id)) grouped.set(id, { requirements: null, roadmap: null, extraFiles: [] });
|
||||
const ms = grouped.get(id)!;
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
|
||||
if (type === 'REQUIREMENTS') ms.requirements = content;
|
||||
else if (type === 'ROADMAP') ms.roadmap = content;
|
||||
else ms.extraFiles.push({ fileName: entry, content });
|
||||
} else {
|
||||
// Non-standard file — try to extract ID from filename
|
||||
const simpleMatch = entry.match(/^(.+?)\./);
|
||||
const id = simpleMatch ? simpleMatch[1] : entry;
|
||||
if (!grouped.has(id)) grouped.set(id, { requirements: null, roadmap: null, extraFiles: [] });
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
grouped.get(id)!.extraFiles.push({ fileName: entry, content });
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(grouped.entries()).map(([id, data]) => ({
|
||||
id,
|
||||
requirements: data.requirements,
|
||||
roadmap: data.roadmap,
|
||||
extraFiles: data.extraFiles,
|
||||
}));
|
||||
}
|
||||
|
||||
// ─── Research Scanner ──────────────────────────────────────────────────────
|
||||
|
||||
function scanResearchDirectory(researchDir: string): PlanningResearch[] {
|
||||
const entries = listDir(researchDir);
|
||||
const research: PlanningResearch[] = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryPath = join(researchDir, entry);
|
||||
if (isDir(entryPath)) continue;
|
||||
const content = readFileSync(entryPath, 'utf-8');
|
||||
research.push({ fileName: entry, content });
|
||||
}
|
||||
|
||||
return research;
|
||||
}
|
||||
|
||||
// ─── Main Orchestrator ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse an old .planning directory into a complete typed PlanningProject.
 *
 * Handles:
 * - Top-level files: PROJECT.md, ROADMAP.md, REQUIREMENTS.md, STATE.md, config.json
 * - Phase directories with plans, summaries, research, verification, extras
 * - Duplicate phase numbers (full directory name as key)
 * - .archive/ skipping
 * - Orphan summaries (summaries without matching plans)
 * - Quick tasks from quick/ directory
 * - Milestones from milestones/ directory
 * - Research from research/ directory
 *
 * Missing files produce null values, not thrown errors.
 * Use validatePlanningDirectory() for pre-flight structural checks.
 */
export async function parsePlanningDirectory(path: string): Promise<PlanningProject> {
  // Run validation first — its result is embedded in the returned project
  // so callers can inspect issues without re-validating.
  const validation = await validatePlanningDirectory(path);

  // Parse top-level files — each is optional; absent files yield null
  // (or [] for requirements).
  const projectContent = readOptional(join(path, 'PROJECT.md'));
  const project = projectContent !== null ? parseOldProject(projectContent) : null;

  const roadmapContent = readOptional(join(path, 'ROADMAP.md'));
  const roadmap = roadmapContent !== null ? parseOldRoadmap(roadmapContent) : null;

  const reqContent = readOptional(join(path, 'REQUIREMENTS.md'));
  const requirements = reqContent !== null ? parseOldRequirements(reqContent) : [];

  const stateContent = readOptional(join(path, 'STATE.md'));
  const state = stateContent !== null ? parseOldState(stateContent) : null;

  const configContent = readOptional(join(path, 'config.json'));
  const config = configContent !== null ? parseOldConfig(configContent) : null;

  // Scan phases/ directory — keyed by full dir name so duplicate phase
  // numbers (e.g. "29-auth" in two milestones) don't collide.
  const phases: Record<string, PlanningPhase> = {};
  const phasesDir = join(path, 'phases');

  if (isDir(phasesDir)) {
    const phaseDirs = listDir(phasesDir).sort();

    for (const dirName of phaseDirs) {
      // Skip .archive and hidden directories
      if (dirName.startsWith('.')) continue;

      const dirPath = join(phasesDir, dirName);
      if (!isDir(dirPath)) continue;

      // Silently skip dirs that don't match NN-slug — not an error.
      const parsed = parsePhaseDir(dirName);
      if (!parsed) continue;

      phases[dirName] = scanPhaseDirectory(dirPath, dirName, parsed);
    }
  }

  // Scan quick/ directory
  const quickDir = join(path, 'quick');
  const quickTasks = isDir(quickDir) ? scanQuickDirectory(quickDir) : [];

  // Scan milestones/ directory
  const msDir = join(path, 'milestones');
  const milestones = isDir(msDir) ? scanMilestonesDirectory(msDir) : [];

  // Scan research/ directory
  const researchDir = join(path, 'research');
  const research = isDir(researchDir) ? scanResearchDirectory(researchDir) : [];

  return {
    path,
    project,
    roadmap,
    requirements,
    state,
    config,
    phases,
    quickTasks,
    milestones,
    research,
    validation,
  };
}
|
||||
624
src/resources/extensions/gsd/migrate/parsers.ts
Normal file
624
src/resources/extensions/gsd/migrate/parsers.ts
Normal file
|
|
@ -0,0 +1,624 @@
|
|||
// Old .planning format per-file parsers
|
||||
// Pure functions that take file content (string) and return typed data.
|
||||
// Zero Pi dependencies — uses only exported helpers from files.ts.
|
||||
|
||||
import { splitFrontmatter, parseFrontmatterMap, extractBoldField } from '../files.ts';
|
||||
|
||||
import type {
|
||||
PlanningRoadmap,
|
||||
PlanningRoadmapMilestone,
|
||||
PlanningRoadmapEntry,
|
||||
PlanningPlan,
|
||||
PlanningPlanFrontmatter,
|
||||
PlanningPlanMustHaves,
|
||||
PlanningSummary,
|
||||
PlanningSummaryFrontmatter,
|
||||
PlanningSummaryRequires,
|
||||
PlanningRequirement,
|
||||
PlanningState,
|
||||
PlanningConfig,
|
||||
} from './types.ts';
|
||||
|
||||
// NOTE: PlanningProjectMeta does not exist in types.ts. The project field on
// PlanningProject is a plain `string | null`, so parseOldProject simply returns
// the raw PROJECT.md content as a string.
|
||||
|
||||
// ─── XML-in-Markdown Extraction ────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Extract content between XML-like tags in markdown.
|
||||
* NOT a real XML parser — handles `<tag>content</tag>` with markdown inside.
|
||||
*/
|
||||
function extractXmlTag(content: string, tagName: string): string {
|
||||
const regex = new RegExp(`<${tagName}>([\\s\\S]*?)<\\/${tagName}>`, 'i');
|
||||
const match = regex.exec(content);
|
||||
return match ? match[1].trim() : '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract all nested `<task>` entries from within a `<tasks>` block.
|
||||
*/
|
||||
function extractTasks(content: string): string[] {
|
||||
const tasksBlock = extractXmlTag(content, 'tasks');
|
||||
if (!tasksBlock) return [];
|
||||
|
||||
const tasks: string[] = [];
|
||||
const regex = /<task>([\s\S]*?)<\/task>/gi;
|
||||
let match: RegExpExecArray | null;
|
||||
while ((match = regex.exec(tasksBlock)) !== null) {
|
||||
const trimmed = match[1].trim();
|
||||
if (trimmed) tasks.push(trimmed);
|
||||
}
|
||||
return tasks;
|
||||
}
|
||||
|
||||
// ─── Roadmap Parser ────────────────────────────────────────────────────────
|
||||
|
||||
/** Parse a checkbox phase entry line: `- [x] 29 — Auth System` */
|
||||
function parsePhaseEntry(line: string): PlanningRoadmapEntry | null {
|
||||
// Strip bold markers (**) for uniform matching — old roadmaps often bold phase entries
|
||||
const stripped = line.replace(/\*\*/g, '');
|
||||
|
||||
// Format 1: - [x] Phase 25: Title (N/N plans) -- completed ...
|
||||
// Also handles: - [x] Phase 25: Title - Description (completed ...)
|
||||
const fmtPhaseColon = stripped.match(/^-\s+\[([ xX])\]\s+(?:Phase\s+)?(\d+(?:\.\d+)?)\s*:\s*(.+)$/);
|
||||
if (fmtPhaseColon) {
|
||||
let title = fmtPhaseColon[3].trim();
|
||||
// Strip trailing parentheticals, plan counts, and completion notes
|
||||
title = title.replace(/\s*\(\d+\/\d+\s+plans?\)/, '')
|
||||
.replace(/\s*--\s+.*$/, '')
|
||||
.replace(/\s*-\s+.*$/, '') // strip "- description" suffix
|
||||
.replace(/\s*\(completed.*\)$/i, '')
|
||||
.replace(/\s*\(shipped.*\)$/i, '')
|
||||
.trim();
|
||||
return {
|
||||
number: parseFloat(fmtPhaseColon[2]),
|
||||
title,
|
||||
done: fmtPhaseColon[1].toLowerCase() === 'x',
|
||||
raw: line,
|
||||
};
|
||||
}
|
||||
|
||||
// Format 2: - [x] 25 — Title (em-dash/en-dash only — NOT plain hyphen to avoid plan file refs)
|
||||
const fmtDash = stripped.match(/^-\s+\[([ xX])\]\s+(?:Phase\s+)?(\d+(?:\.\d+)?)\s*[—–]\s*(.+)$/);
|
||||
if (fmtDash) {
|
||||
let title = fmtDash[3].trim();
|
||||
title = title.replace(/\s*\(\d+\/\d+\s+plans?\)/, '')
|
||||
.replace(/\s*--\s+.*$/, '')
|
||||
.trim();
|
||||
return {
|
||||
number: parseFloat(fmtDash[2]),
|
||||
title,
|
||||
done: fmtDash[1].toLowerCase() === 'x',
|
||||
raw: line,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Parse old-format ROADMAP.md.
 * Handles three formats, checked in this order (first match wins):
 * 1. Details-sectioned — `<details><summary>v1.0 Title (Phases N-M)</summary>` blocks with phase checkboxes inside
 * 2. Milestone-sectioned — `## v2.0 — Title` headings with optional `<details>` blocks
 * 3. Flat phase lists — checkbox lines under a single Phases heading
 * Formats 1/2 populate result.milestones; format 3 populates result.phases.
 */
export function parseOldRoadmap(content: string): PlanningRoadmap {
  const result: PlanningRoadmap = {
    raw: content,
    milestones: [],
    phases: [],
  };

  const lines = content.split('\n');

  // ─── Strategy 1: Detect <details><summary>vN.N Title</summary> blocks ───
  // This handles the format where milestones are <details> blocks containing phase checkboxes
  const detailsMilestones = parseDetailsBlockMilestones(lines);
  if (detailsMilestones.length > 0) {
    result.milestones = detailsMilestones;

    // Also check for non-collapsed milestone sections (### v3.0 Title)
    // that follow the <details> blocks
    for (let i = 0; i < lines.length; i++) {
      const heading = lines[i].match(/^###\s+(v[\d.]+)\s+(.+?)(?:\s*\(.*\))?\s*$/);
      if (heading) {
        // Already captured as a details block?
        const id = heading[1];
        if (result.milestones.some(m => m.id === id)) continue;

        // Collect phase entries until next ## or ### heading
        const phases: PlanningRoadmapEntry[] = [];
        for (let j = i + 1; j < lines.length; j++) {
          if (/^##?\s/.test(lines[j]) || /^###\s/.test(lines[j])) break;
          const entry = parsePhaseEntry(lines[j].trim());
          if (entry) phases.push(entry);
        }
        result.milestones.push({
          id,
          title: heading[2].trim(),
          collapsed: false,
          phases,
        });
      }
    }
    // Early return: when <details> milestones exist, they take precedence over
    // the heading-sectioned and flat strategies below.
    return result;
  }

  // ─── Strategy 2: Detect ## heading-sectioned milestones ───
  const milestoneHeadingRegex = /^##\s+(.+)$/;
  const milestoneHeadings: { index: number; id: string; title: string }[] = [];

  for (let i = 0; i < lines.length; i++) {
    const match = lines[i].match(milestoneHeadingRegex);
    if (match) {
      const heading = match[1].trim();
      // Skip generic headings like "## Phases", "## Milestones", "## Phase Details", "## Progress"
      if (/^(phases?|milestones?|phase\s+details?|progress)$/i.test(heading)) continue;
      // Extract milestone ID (e.g. "v2.0" from "v2.0 — Foundation"); also accepts
      // word-ish IDs, separated from the title by em/en dash or hyphen.
      const idMatch = heading.match(/^(v[\d.]+|[\w.-]+)\s*[—–-]\s*(.+)$/);
      if (idMatch) {
        milestoneHeadings.push({ index: i, id: idMatch[1], title: idMatch[2].trim() });
      }
    }
  }

  if (milestoneHeadings.length > 0) {
    // Milestone-sectioned format: each section runs to the next milestone heading.
    for (let m = 0; m < milestoneHeadings.length; m++) {
      const startIdx = milestoneHeadings[m].index + 1;
      const endIdx = m + 1 < milestoneHeadings.length ? milestoneHeadings[m + 1].index : lines.length;
      const sectionLines = lines.slice(startIdx, endIdx);

      const milestone: PlanningRoadmapMilestone = {
        id: milestoneHeadings[m].id,
        title: milestoneHeadings[m].title,
        collapsed: false,
        phases: [],
      };

      // Check for <details> block — only sets the collapsed flag; entries are
      // still harvested line-by-line below.
      const sectionText = sectionLines.join('\n');
      if (sectionText.includes('<details>')) {
        milestone.collapsed = true;
      }

      // Extract phase entries from the section (including inside <details>)
      for (const line of sectionLines) {
        const entry = parsePhaseEntry(line.trim());
        if (entry) {
          milestone.phases.push(entry);
        }
      }

      result.milestones.push(milestone);
    }
  } else {
    // ─── Strategy 3: Flat format — just extract all phase checkbox lines ───
    for (const line of lines) {
      const entry = parsePhaseEntry(line.trim());
      if (entry) {
        result.phases.push(entry);
      }
    }
  }

  return result;
}
|
||||
|
||||
/**
|
||||
* Parse <details><summary>vN.N Title (Phases N-M)</summary>...</details> blocks.
|
||||
* Each block becomes a milestone with the phase entries inside it.
|
||||
*/
|
||||
function parseDetailsBlockMilestones(lines: string[]): PlanningRoadmapMilestone[] {
|
||||
const milestones: PlanningRoadmapMilestone[] = [];
|
||||
let inDetails = false;
|
||||
let currentMilestone: PlanningRoadmapMilestone | null = null;
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
|
||||
if (trimmed === '<details>') {
|
||||
inDetails = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (inDetails && !currentMilestone) {
|
||||
// Look for <summary>vN.N Title (Phases N-M) -- STATUS</summary>
|
||||
const summaryMatch = trimmed.match(/<summary>\s*(v[\d.]+)\s+(.+?)\s*(?:\(.*\))?\s*(?:--\s*.*)?\s*<\/summary>/);
|
||||
if (summaryMatch) {
|
||||
currentMilestone = {
|
||||
id: summaryMatch[1],
|
||||
title: summaryMatch[2].trim(),
|
||||
collapsed: true,
|
||||
phases: [],
|
||||
};
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (trimmed === '</details>') {
|
||||
if (currentMilestone) {
|
||||
milestones.push(currentMilestone);
|
||||
currentMilestone = null;
|
||||
}
|
||||
inDetails = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentMilestone) {
|
||||
const entry = parsePhaseEntry(trimmed);
|
||||
if (entry) {
|
||||
currentMilestone.phases.push(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return milestones;
|
||||
}
|
||||
|
||||
// ─── Plan Parser (XML-in-Markdown) ─────────────────────────────────────────
|
||||
|
||||
/** Strip surrounding quotes from YAML string values */
|
||||
function unquote(val: unknown): string {
|
||||
const s = String(val ?? '');
|
||||
if ((s.startsWith('"') && s.endsWith('"')) || (s.startsWith("'") && s.endsWith("'"))) {
|
||||
return s.slice(1, -1);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
/**
 * Parse the must_haves nested structure from frontmatter lines directly.
 * parseFrontmatterMap doesn't handle 3-level nesting well, so we re-parse.
 *
 * Expected shape:
 *   must_haves:
 *     truths:
 *       - item
 *     artifacts: []
 * Returns null when the key is absent or all three lists end up empty.
 * NOTE(review): the inline comments below claim 2-space/4-space indents but the
 * patterns as transcribed match a single leading space — confirm against the
 * original repository source.
 */
function parseMustHavesFromLines(fmLines: string[]): PlanningPlanMustHaves | null {
  const start = fmLines.findIndex(l => /^must_haves\s*:/.test(l));
  if (start === -1) return null;

  const truths: string[] = [];
  const artifacts: string[] = [];
  const keyLinks: string[] = [];
  let currentList: string[] | null = null;  // list currently receiving "- item" lines

  for (let i = start + 1; i < fmLines.length; i++) {
    const line = fmLines[i];
    // New top-level key — stop
    if (/^\w/.test(line)) break;
    // Sub-key at 2-space indent
    const subKey = line.match(/^ (\w[\w_]*):/);
    if (subKey) {
      const key = subKey[1];
      if (key === 'truths') currentList = truths;
      else if (key === 'artifacts') currentList = artifacts;
      else if (key === 'key_links') currentList = keyLinks;
      else currentList = null;  // unknown sub-key: discard its items
      // Check for inline empty array
      if (/:\s*\[\]/.test(line)) currentList = null;
      continue;
    }
    // Array item at 4-space indent
    const item = line.match(/^ - (.+)$/);
    if (item && currentList) {
      currentList.push(item[1].trim());
    }
  }

  if (truths.length === 0 && artifacts.length === 0 && keyLinks.length === 0) return null;
  return { truths, artifacts, key_links: keyLinks };
}
|
||||
|
||||
function parsePlanFrontmatter(fm: Record<string, unknown>, fmLines: string[] | null): PlanningPlanFrontmatter {
|
||||
const mustHaves = fmLines ? parseMustHavesFromLines(fmLines) : null;
|
||||
|
||||
return {
|
||||
phase: unquote(fm.phase),
|
||||
plan: unquote(fm.plan),
|
||||
type: unquote(fm.type),
|
||||
wave: fm.wave !== undefined ? Number(fm.wave) : null,
|
||||
depends_on: Array.isArray(fm.depends_on) ? fm.depends_on.map(s => unquote(s)) : [],
|
||||
files_modified: Array.isArray(fm.files_modified) ? fm.files_modified.map(s => unquote(s)) : [],
|
||||
autonomous: fm.autonomous === 'true' || fm.autonomous === true,
|
||||
must_haves: mustHaves,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse old-format plan file with YAML frontmatter and XML-in-markdown sections.
|
||||
* Falls back to plain markdown for quick-task plans that lack XML tags.
|
||||
*/
|
||||
export function parseOldPlan(content: string, fileName: string = '', planNumber: string = ''): PlanningPlan {
|
||||
const [fmLines, body] = splitFrontmatter(content);
|
||||
const fm = fmLines ? parseFrontmatterMap(fmLines) : {};
|
||||
const frontmatter = parsePlanFrontmatter(fm, fmLines);
|
||||
|
||||
// Extract XML-in-markdown sections
|
||||
const objective = extractXmlTag(content, 'objective');
|
||||
const tasks = extractTasks(content);
|
||||
const context = extractXmlTag(content, 'context');
|
||||
const verification = extractXmlTag(content, 'verification');
|
||||
const successCriteria = extractXmlTag(content, 'success_criteria');
|
||||
|
||||
return {
|
||||
fileName,
|
||||
planNumber: planNumber || String(fm.plan ?? ''),
|
||||
frontmatter,
|
||||
objective,
|
||||
tasks,
|
||||
context,
|
||||
verification,
|
||||
successCriteria,
|
||||
raw: content,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Summary Parser (YAML Frontmatter) ─────────────────────────────────────
|
||||
|
||||
function parseRequiresArray(raw: unknown): PlanningSummaryRequires[] {
|
||||
if (!Array.isArray(raw)) return [];
|
||||
return raw.map(item => {
|
||||
if (typeof item === 'object' && item !== null) {
|
||||
const obj = item as Record<string, string>;
|
||||
return { phase: obj.phase ?? '', provides: obj.provides ?? '' };
|
||||
}
|
||||
return { phase: '', provides: String(item) };
|
||||
});
|
||||
}
|
||||
|
||||
function toStringArray(val: unknown): string[] {
|
||||
if (Array.isArray(val)) return val.map(String);
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
 * Parse YAML-like frontmatter lines into a flat key-value map.
 * Like parseFrontmatterMap but supports hyphenated keys (e.g. `tech-stack:`).
 *
 * Supported shapes: `key: value`, inline arrays `key: [a, b]`, block arrays
 * (`key:` or `key: []` followed by indented `- item` lines), and one level of
 * object items inside block arrays (`- phase: 01` continued by indented
 * `provides: x` lines).
 * NOTE(review): the inline comments below claim 2-space/4-space indents but the
 * patterns as transcribed match a single leading space — confirm against the
 * original repository source.
 */
function parseFrontmatterMapHyphen(lines: string[]): Record<string, unknown> {
  const result: Record<string, unknown> = {};
  let currentKey: string | null = null;      // key whose block array is being collected
  let currentArray: unknown[] | null = null; // items gathered so far for currentKey
  let currentObj: Record<string, string> | null = null; // open object item, if any

  for (const line of lines) {
    // Nested object property (4-space indent with key: value)
    const nestedMatch = line.match(/^ ([\w][\w_-]*)\s*:\s*(.*)$/);
    if (nestedMatch && currentArray && currentObj) {
      currentObj[nestedMatch[1]] = nestedMatch[2].trim();
      continue;
    }

    // Array item (2-space indent)
    const arrayMatch = line.match(/^ - (.*)$/);
    if (arrayMatch && currentKey) {
      // A new item closes any object item still open from a previous `- key: value`.
      if (currentObj && Object.keys(currentObj).length > 0) {
        currentArray!.push(currentObj);
      }
      currentObj = null;

      const val = arrayMatch[1].trim();
      if (!currentArray) currentArray = [];

      // `- key: value` starts an object item; a plain `- value` is a string item.
      const nestedStart = val.match(/^([\w][\w_-]*)\s*:\s*(.*)$/);
      if (nestedStart) {
        currentObj = { [nestedStart[1]]: nestedStart[2].trim() };
      } else {
        currentArray.push(val);
      }
      continue;
    }

    // Flush previous key (any non-item line ends the block array in progress)
    if (currentKey) {
      if (currentObj && Object.keys(currentObj).length > 0 && currentArray) {
        currentArray.push(currentObj);
        currentObj = null;
      }
      if (currentArray) {
        result[currentKey] = currentArray;
      }
      currentArray = null;
    }

    // Top-level key: value (supports hyphens in key names)
    const kvMatch = line.match(/^([\w][\w_-]*)\s*:\s*(.*)$/);
    if (kvMatch) {
      currentKey = kvMatch[1];
      const val = kvMatch[2].trim();

      if (val === '' || val === '[]') {
        // Bare key (or explicit []) opens a block array; `- item` lines may follow.
        currentArray = [];
      } else if (val.startsWith('[') && val.endsWith(']')) {
        // Inline array: naive comma split (no quoted-comma handling).
        const inner = val.slice(1, -1).trim();
        result[currentKey] = inner ? inner.split(',').map(s => s.trim()) : [];
        currentKey = null;
      } else {
        result[currentKey] = val;
        currentKey = null;
      }
    }
  }

  // Flush final key (handles a block array that runs to the last line)
  if (currentKey) {
    if (currentObj && Object.keys(currentObj).length > 0 && currentArray) {
      currentArray.push(currentObj);
      currentObj = null;
    }
    if (currentArray) {
      result[currentKey] = currentArray;
    }
  }

  return result;
}
|
||||
|
||||
function parseSummaryFrontmatter(fm: Record<string, unknown>): PlanningSummaryFrontmatter {
|
||||
return {
|
||||
phase: unquote(fm.phase),
|
||||
plan: unquote(fm.plan),
|
||||
subsystem: unquote(fm.subsystem),
|
||||
tags: toStringArray(fm.tags),
|
||||
requires: parseRequiresArray(fm.requires),
|
||||
provides: toStringArray(fm.provides),
|
||||
affects: toStringArray(fm.affects),
|
||||
'tech-stack': toStringArray(fm['tech-stack']),
|
||||
'key-files': toStringArray(fm['key-files']),
|
||||
'key-decisions': toStringArray(fm['key-decisions']),
|
||||
'patterns-established': toStringArray(fm['patterns-established']),
|
||||
duration: unquote(fm.duration),
|
||||
completed: unquote(fm.completed),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse old-format summary file with YAML frontmatter.
|
||||
*/
|
||||
export function parseOldSummary(content: string, fileName: string = '', planNumber: string = ''): PlanningSummary {
|
||||
const [fmLines, body] = splitFrontmatter(content);
|
||||
const fm = fmLines ? parseFrontmatterMapHyphen(fmLines) : {};
|
||||
|
||||
return {
|
||||
fileName,
|
||||
planNumber: planNumber || String(fm.plan ?? ''),
|
||||
frontmatter: parseSummaryFrontmatter(fm),
|
||||
body,
|
||||
raw: content,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Requirements Parser ───────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse old-format REQUIREMENTS.md.
 * Extracts requirement entries from markdown with status sections and requirement headings.
 *
 * Two entry styles are recognized:
 * - Heading style: `### R001 — Title` followed by `- Description:` / `- Status:` bullets
 * - Bullet style: `- [x] **ID**: Description` (status comes from the checkbox
 *   plus the current `##` section)
 * `##` headings (Active / Validated / Deferred / ...) set the status context
 * for every entry below them.
 */
export function parseOldRequirements(content: string): PlanningRequirement[] {
  const requirements: PlanningRequirement[] = [];
  const lines = content.split('\n');

  let currentStatus = '';                                     // lowercased ## section name
  let currentReq: Partial<PlanningRequirement> | null = null; // heading-style req being built
  let currentRaw: string[] = [];                              // raw lines of currentReq

  // Push the in-progress heading-style requirement (if it has id + title) and reset.
  function flushReq() {
    if (currentReq?.id && currentReq?.title) {
      requirements.push({
        id: currentReq.id,
        title: currentReq.title,
        status: currentReq.status || currentStatus || 'unknown',
        description: currentReq.description || '',
        raw: currentRaw.join('\n').trim(),
      });
    }
    currentReq = null;
    currentRaw = [];
  }

  for (const line of lines) {
    // Status section heading (## Active, ## Validated, ## Deferred)
    const statusMatch = line.match(/^##\s+(\w[\w\s&]*\w)\s*$/);
    if (statusMatch) {
      flushReq();
      currentStatus = statusMatch[1].toLowerCase();
      continue;
    }

    // Section heading (### Category Name) — use as context for bullet requirements
    const sectionMatch = line.match(/^###\s+(.+)$/);
    if (sectionMatch) {
      // Check if this is a requirement heading (### R001 — Title)
      const reqHeading = sectionMatch[1].match(/^(R\d+)\s*[—–-]\s*(.+)$/);
      if (reqHeading) {
        flushReq();
        currentReq = { id: reqHeading[1], title: reqHeading[2].trim(), status: currentStatus, description: '' };
        currentRaw.push(line);
        continue;
      }
      // Plain category heading: close any open requirement; the heading itself
      // just provides context for bullet requirements that follow.
      flushReq();
      continue;
    }

    // Bullet-format requirement: - [x] **ID**: Description
    const bulletReqMatch = line.match(/^-\s+\[([ xX])\]\s+\*\*([^*]+)\*\*\s*:\s*(.+)$/);
    if (bulletReqMatch) {
      flushReq();
      const done = bulletReqMatch[1].toLowerCase() === 'x';
      const id = bulletReqMatch[2].trim();
      const desc = bulletReqMatch[3].trim();
      requirements.push({
        id,
        // Bullet reqs carry no separate title; the description doubles as one.
        title: desc,
        status: done ? 'complete' : (currentStatus || 'active'),
        description: desc,
        raw: line,
      });
      continue;
    }

    // Description or metadata within a heading-style requirement
    if (currentReq) {
      currentRaw.push(line);
      const descMatch = line.match(/^-\s+Description:\s*(.+)$/);
      if (descMatch) {
        currentReq.description = descMatch[1].trim();
        continue;
      }
      const statMatch = line.match(/^-\s+Status:\s*(.+)$/);
      if (statMatch) {
        currentReq.status = statMatch[1].trim();
      }
    }
  }

  // Flush any requirement still open at EOF.
  flushReq();
  return requirements;
}
|
||||
|
||||
// ─── Project Parser ────────────────────────────────────────────────────────
|
||||
|
||||
// PlanningProjectMeta isn't in types.ts — project field on PlanningProject is `string | null`.
|
||||
// This parser returns the raw content as a string. The top-level parser stores it directly.
|
||||
|
||||
/**
 * Parse old-format PROJECT.md.
 * Returns the raw content as a string (stored as the project field on
 * PlanningProject, which is `string | null`). Deliberately a pass-through —
 * kept as a named parser so the top-level scan treats every .planning file
 * uniformly.
 */
export function parseOldProject(content: string): string {
  return content;
}
|
||||
|
||||
// ─── State Parser ──────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Parse old-format STATE.md.
|
||||
* Extracts current phase and status from bold-field patterns.
|
||||
*/
|
||||
export function parseOldState(content: string): PlanningState {
|
||||
const currentPhase = extractBoldField(content, 'Current Phase');
|
||||
const status = extractBoldField(content, 'Status');
|
||||
|
||||
return {
|
||||
raw: content,
|
||||
currentPhase,
|
||||
status,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Config Parser ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Parse old-format config.json.
|
||||
* Returns null on invalid JSON (graceful error handling).
|
||||
*/
|
||||
export function parseOldConfig(content: string): PlanningConfig | null {
|
||||
try {
|
||||
const parsed = JSON.parse(content);
|
||||
if (typeof parsed !== 'object' || parsed === null) return null;
|
||||
return parsed as PlanningConfig;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
48
src/resources/extensions/gsd/migrate/preview.ts
Normal file
48
src/resources/extensions/gsd/migrate/preview.ts
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
// GSD Migration Preview — Pre-write statistics
|
||||
// Pure function, no I/O. Computes counts from a GSDProject.
|
||||
|
||||
import type { GSDProject } from './types.ts';
|
||||
import type { MigrationPreview } from './writer.ts';
|
||||
|
||||
/**
|
||||
* Compute pre-write statistics from a GSDProject without performing I/O.
|
||||
* Used to show the user what a migration will produce before writing anything.
|
||||
*/
|
||||
export function generatePreview(project: GSDProject): MigrationPreview {
|
||||
let totalSlices = 0;
|
||||
let totalTasks = 0;
|
||||
let doneSlices = 0;
|
||||
let doneTasks = 0;
|
||||
|
||||
for (const milestone of project.milestones) {
|
||||
for (const slice of milestone.slices) {
|
||||
totalSlices++;
|
||||
if (slice.done) doneSlices++;
|
||||
for (const task of slice.tasks) {
|
||||
totalTasks++;
|
||||
if (task.done) doneTasks++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const reqCounts = { active: 0, validated: 0, deferred: 0, outOfScope: 0, total: 0 };
|
||||
for (const req of project.requirements) {
|
||||
const status = req.status.toLowerCase();
|
||||
if (status === 'active') reqCounts.active++;
|
||||
else if (status === 'validated') reqCounts.validated++;
|
||||
else if (status === 'deferred') reqCounts.deferred++;
|
||||
else if (status === 'out-of-scope') reqCounts.outOfScope++;
|
||||
reqCounts.total++;
|
||||
}
|
||||
|
||||
return {
|
||||
milestoneCount: project.milestones.length,
|
||||
totalSlices,
|
||||
totalTasks,
|
||||
doneSlices,
|
||||
doneTasks,
|
||||
sliceCompletionPct: totalSlices > 0 ? Math.round((doneSlices / totalSlices) * 100) : 0,
|
||||
taskCompletionPct: totalTasks > 0 ? Math.round((doneTasks / totalTasks) * 100) : 0,
|
||||
requirements: reqCounts,
|
||||
};
|
||||
}
|
||||
346
src/resources/extensions/gsd/migrate/transformer.ts
Normal file
346
src/resources/extensions/gsd/migrate/transformer.ts
Normal file
|
|
@ -0,0 +1,346 @@
|
|||
// Migration transformer — converts parsed PlanningProject into GSDProject.
|
||||
// Pure function: no I/O, no side effects, no imports outside migrate/.
|
||||
|
||||
import type {
|
||||
PlanningProject,
|
||||
PlanningPhase,
|
||||
PlanningPlan,
|
||||
PlanningSummary,
|
||||
PlanningRoadmapEntry,
|
||||
PlanningRoadmapMilestone,
|
||||
PlanningResearch,
|
||||
PlanningRequirement,
|
||||
GSDProject,
|
||||
GSDMilestone,
|
||||
GSDSlice,
|
||||
GSDTask,
|
||||
GSDRequirement,
|
||||
GSDSliceSummaryData,
|
||||
GSDTaskSummaryData,
|
||||
GSDBoundaryEntry,
|
||||
} from './types.ts';
|
||||
|
||||
// ─── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
function padId(prefix: string, n: number, width = 2): string {
|
||||
return `${prefix}${String(n).padStart(width, '0')}`;
|
||||
}
|
||||
|
||||
function milestoneId(n: number): string {
|
||||
return padId('M', n, 3);
|
||||
}
|
||||
|
||||
function kebabToTitle(slug: string): string {
|
||||
return slug
|
||||
.split('-')
|
||||
.map((w) => w.charAt(0).toUpperCase() + w.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
|
||||
function firstSentence(text: string): string {
|
||||
const trimmed = text.trim();
|
||||
const match = trimmed.match(/^[^.!?]*[.!?]/);
|
||||
return match ? match[0].trim() : trimmed;
|
||||
}
|
||||
|
||||
/**
 * Preferred research file ordering for consolidation.
 * Known file names come first, in this fixed order; anything else sorts after
 * them alphabetically (see sortResearch).
 */
const RESEARCH_ORDER = ['SUMMARY.md', 'ARCHITECTURE.md', 'STACK.md', 'FEATURES.md', 'PITFALLS.md'];
|
||||
|
||||
function sortResearch(files: PlanningResearch[]): PlanningResearch[] {
|
||||
return [...files].sort((a, b) => {
|
||||
const ai = RESEARCH_ORDER.indexOf(a.fileName);
|
||||
const bi = RESEARCH_ORDER.indexOf(b.fileName);
|
||||
const aw = ai === -1 ? RESEARCH_ORDER.length : ai;
|
||||
const bw = bi === -1 ? RESEARCH_ORDER.length : bi;
|
||||
if (aw !== bw) return aw - bw;
|
||||
return a.fileName.localeCompare(b.fileName);
|
||||
});
|
||||
}
|
||||
|
||||
function consolidateResearch(files: PlanningResearch[]): string | null {
|
||||
if (files.length === 0) return null;
|
||||
return sortResearch(files)
|
||||
.map((f) => f.content.trim())
|
||||
.join('\n\n');
|
||||
}
|
||||
|
||||
// ─── Task Mapping ──────────────────────────────────────────────────────────
|
||||
|
||||
function buildTaskSummary(summary: PlanningSummary): GSDTaskSummaryData {
|
||||
return {
|
||||
completedAt: summary.frontmatter.completed ?? '',
|
||||
provides: summary.frontmatter.provides ?? [],
|
||||
keyFiles: summary.frontmatter['key-files'] ?? [],
|
||||
duration: summary.frontmatter.duration ?? '',
|
||||
whatHappened: summary.body?.trim() ?? '',
|
||||
};
|
||||
}
|
||||
|
||||
function mapTask(plan: PlanningPlan, index: number, summaries: Record<string, PlanningSummary>): GSDTask {
|
||||
const summary = summaries[plan.planNumber];
|
||||
const done = summary !== undefined;
|
||||
return {
|
||||
id: padId('T', index + 1),
|
||||
title: buildTaskTitle(plan),
|
||||
description: plan.objective ?? '',
|
||||
done,
|
||||
estimate: done ? (summary.frontmatter.duration ?? '') : '',
|
||||
files: plan.frontmatter.files_modified ?? [],
|
||||
mustHaves: plan.frontmatter.must_haves?.truths ?? [],
|
||||
summary: done ? buildTaskSummary(summary) : null,
|
||||
};
|
||||
}
|
||||
|
||||
function buildTaskTitle(plan: PlanningPlan): string {
|
||||
const fm = plan.frontmatter;
|
||||
if (fm.phase && fm.plan) {
|
||||
return `${fm.phase} ${fm.plan}`;
|
||||
}
|
||||
return `Plan ${plan.planNumber}`;
|
||||
}
|
||||
|
||||
// ─── Slice Mapping ─────────────────────────────────────────────────────────
|
||||
|
||||
function buildSliceSummary(phase: PlanningPhase): GSDSliceSummaryData | null {
|
||||
// Aggregate from all summaries in the phase
|
||||
const summaryEntries = Object.values(phase.summaries);
|
||||
if (summaryEntries.length === 0) return null;
|
||||
|
||||
const provides: string[] = [];
|
||||
const keyFiles: string[] = [];
|
||||
const keyDecisions: string[] = [];
|
||||
const patternsEstablished: string[] = [];
|
||||
let lastCompleted = '';
|
||||
let totalDuration = '';
|
||||
const bodies: string[] = [];
|
||||
|
||||
for (const s of summaryEntries) {
|
||||
provides.push(...(s.frontmatter.provides ?? []));
|
||||
keyFiles.push(...(s.frontmatter['key-files'] ?? []));
|
||||
keyDecisions.push(...(s.frontmatter['key-decisions'] ?? []));
|
||||
patternsEstablished.push(...(s.frontmatter['patterns-established'] ?? []));
|
||||
if (s.frontmatter.completed) lastCompleted = s.frontmatter.completed;
|
||||
if (s.frontmatter.duration) totalDuration = s.frontmatter.duration;
|
||||
if (s.body?.trim()) bodies.push(s.body.trim());
|
||||
}
|
||||
|
||||
return {
|
||||
completedAt: lastCompleted,
|
||||
provides,
|
||||
keyFiles,
|
||||
keyDecisions,
|
||||
patternsEstablished,
|
||||
duration: totalDuration,
|
||||
whatHappened: bodies.join('\n\n'),
|
||||
};
|
||||
}
|
||||
|
||||
function deriveDemo(phase: PlanningPhase, slug: string): string {
|
||||
// First plan's objective, first sentence
|
||||
const planNumbers = Object.keys(phase.plans).sort((a, b) => Number(a) - Number(b));
|
||||
if (planNumbers.length > 0) {
|
||||
const firstPlan = phase.plans[planNumbers[0]];
|
||||
if (firstPlan?.objective) {
|
||||
return firstSentence(firstPlan.objective);
|
||||
}
|
||||
}
|
||||
return `unit tests prove ${slug} works`;
|
||||
}
|
||||
|
||||
function mapSlice(
|
||||
phase: PlanningPhase | undefined,
|
||||
entry: PlanningRoadmapEntry,
|
||||
index: number,
|
||||
prevSliceId: string | null,
|
||||
): GSDSlice {
|
||||
const sliceId = padId('S', index + 1);
|
||||
const slug = phase?.slug ?? entry.title;
|
||||
const demo = phase ? deriveDemo(phase, slug) : `unit tests prove ${entry.title} works`;
|
||||
|
||||
let tasks: GSDTask[] = [];
|
||||
if (phase) {
|
||||
const planNumbers = Object.keys(phase.plans).sort((a, b) => Number(a) - Number(b));
|
||||
tasks = planNumbers.map((pn, i) => mapTask(phase.plans[pn], i, phase.summaries));
|
||||
}
|
||||
|
||||
const done = entry.done;
|
||||
const sliceSummary = done && phase ? buildSliceSummary(phase) : null;
|
||||
|
||||
return {
|
||||
id: sliceId,
|
||||
title: kebabToTitle(slug),
|
||||
risk: 'medium',
|
||||
depends: prevSliceId ? [prevSliceId] : [],
|
||||
done,
|
||||
demo,
|
||||
goal: demo,
|
||||
tasks,
|
||||
research: phase ? consolidateResearch(phase.research) : null,
|
||||
summary: sliceSummary,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Milestone Building ───────────────────────────────────────────────────
|
||||
|
||||
function findPhase(phases: Record<string, PlanningPhase>, phaseNumber: number, entryTitle?: string): PlanningPhase | undefined {
|
||||
const matches = Object.values(phases).filter((p) => p.number === phaseNumber);
|
||||
if (matches.length <= 1) return matches[0];
|
||||
// Multiple phases with the same number — try to match by title/slug similarity
|
||||
if (entryTitle) {
|
||||
const normalizedTitle = entryTitle.toLowerCase().replace(/[^a-z0-9]+/g, ' ').trim();
|
||||
const best = matches.find((p) => {
|
||||
const normalizedSlug = p.slug.replace(/-/g, ' ').toLowerCase();
|
||||
return normalizedSlug === normalizedTitle || normalizedTitle.includes(normalizedSlug) || normalizedSlug.includes(normalizedTitle);
|
||||
});
|
||||
if (best) return best;
|
||||
}
|
||||
return matches[0];
|
||||
}
|
||||
|
||||
function buildMilestoneFromEntries(
|
||||
id: string,
|
||||
title: string,
|
||||
entries: PlanningRoadmapEntry[],
|
||||
phases: Record<string, PlanningPhase>,
|
||||
research: PlanningResearch[],
|
||||
): GSDMilestone {
|
||||
// Sort entries by phase number (float sort)
|
||||
const sorted = [...entries].sort((a, b) => a.number - b.number);
|
||||
|
||||
const slices: GSDSlice[] = [];
|
||||
for (let i = 0; i < sorted.length; i++) {
|
||||
const entry = sorted[i];
|
||||
const phase = findPhase(phases, entry.number, entry.title);
|
||||
const prevId = i > 0 ? slices[i - 1].id : null;
|
||||
slices.push(mapSlice(phase, entry, i, prevId));
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
title,
|
||||
vision: '',
|
||||
successCriteria: [],
|
||||
slices,
|
||||
research: consolidateResearch(research),
|
||||
boundaryMap: [],
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Requirements Mapping ──────────────────────────────────────────────────
|
||||
|
||||
const VALID_STATUSES = new Set(['active', 'validated', 'deferred']);
|
||||
const COMPLETE_ALIASES = new Set(['complete', 'completed', 'done', 'shipped']);
|
||||
|
||||
function normalizeStatus(status: string): 'active' | 'validated' | 'deferred' {
|
||||
const lower = status.toLowerCase().trim();
|
||||
if (VALID_STATUSES.has(lower)) return lower as 'active' | 'validated' | 'deferred';
|
||||
if (COMPLETE_ALIASES.has(lower)) return 'validated';
|
||||
return 'active';
|
||||
}
|
||||
|
||||
function mapRequirements(reqs: PlanningRequirement[]): GSDRequirement[] {
|
||||
let autoId = 0;
|
||||
return reqs.map((req) => {
|
||||
autoId++;
|
||||
return {
|
||||
id: req.id && req.id.trim() !== '' ? req.id : padId('R', autoId, 3),
|
||||
title: req.title,
|
||||
class: 'core-capability',
|
||||
status: normalizeStatus(req.status),
|
||||
description: req.description,
|
||||
source: 'inferred',
|
||||
primarySlice: 'none yet',
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Project-Level Derivation ──────────────────────────────────────────────
|
||||
|
||||
function deriveVision(parsed: PlanningProject): string {
|
||||
// Try first non-heading line from PROJECT.md
|
||||
if (parsed.project) {
|
||||
const lines = parsed.project.split('\n');
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
if (trimmed && !trimmed.startsWith('#')) {
|
||||
return firstSentence(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Fallback: roadmap title
|
||||
if (parsed.roadmap) {
|
||||
if (parsed.roadmap.milestones.length > 0) {
|
||||
return parsed.roadmap.milestones[0].title;
|
||||
}
|
||||
}
|
||||
return 'Project migration from .planning format';
|
||||
}
|
||||
|
||||
function deriveDecisions(parsed: PlanningProject): string {
|
||||
// Extract key decisions from phase summaries if available
|
||||
const decisions: string[] = [];
|
||||
for (const phase of Object.values(parsed.phases)) {
|
||||
for (const summary of Object.values(phase.summaries)) {
|
||||
const kd = summary.frontmatter['key-decisions'] ?? [];
|
||||
decisions.push(...kd);
|
||||
}
|
||||
}
|
||||
if (decisions.length === 0) return '';
|
||||
return decisions.map((d) => `- ${d}`).join('\n');
|
||||
}
|
||||
|
||||
// ─── Main Entry Point ──────────────────────────────────────────────────────
|
||||
|
||||
export function transformToGSD(parsed: PlanningProject): GSDProject {
|
||||
const milestones: GSDMilestone[] = [];
|
||||
|
||||
const roadmap = parsed.roadmap;
|
||||
const isMultiMilestone = roadmap !== null && roadmap.milestones.length > 0;
|
||||
const hasFlatPhases = roadmap !== null && roadmap.phases.length > 0;
|
||||
|
||||
if (isMultiMilestone) {
|
||||
// Multi-milestone mode: each roadmap milestone section → one GSDMilestone
|
||||
for (let mi = 0; mi < roadmap!.milestones.length; mi++) {
|
||||
const rm = roadmap!.milestones[mi];
|
||||
milestones.push(
|
||||
buildMilestoneFromEntries(
|
||||
milestoneId(mi + 1),
|
||||
rm.title,
|
||||
rm.phases,
|
||||
parsed.phases,
|
||||
mi === 0 ? parsed.research : [],
|
||||
),
|
||||
);
|
||||
}
|
||||
} else if (hasFlatPhases) {
|
||||
// Single-milestone mode from roadmap phases
|
||||
milestones.push(
|
||||
buildMilestoneFromEntries('M001', 'Migration', roadmap!.phases, parsed.phases, parsed.research),
|
||||
);
|
||||
} else {
|
||||
// Null/empty roadmap fallback: use filesystem phases, all not-done
|
||||
const fsPhases = Object.values(parsed.phases).sort((a, b) => a.number - b.number);
|
||||
const entries: PlanningRoadmapEntry[] = fsPhases.map((p) => ({
|
||||
number: p.number,
|
||||
title: p.slug,
|
||||
done: false,
|
||||
raw: '',
|
||||
}));
|
||||
milestones.push(
|
||||
buildMilestoneFromEntries('M001', 'Migration', entries, parsed.phases, parsed.research),
|
||||
);
|
||||
}
|
||||
|
||||
// Set vision on first milestone (or all if multi)
|
||||
const vision = deriveVision(parsed);
|
||||
for (const m of milestones) {
|
||||
if (!m.vision) m.vision = vision;
|
||||
}
|
||||
|
||||
return {
|
||||
milestones,
|
||||
projectContent: parsed.project ?? '',
|
||||
requirements: mapRequirements(parsed.requirements),
|
||||
decisionsContent: deriveDecisions(parsed),
|
||||
};
|
||||
}
|
||||
370
src/resources/extensions/gsd/migrate/types.ts
Normal file
370
src/resources/extensions/gsd/migrate/types.ts
Normal file
|
|
@ -0,0 +1,370 @@
|
|||
// Old .planning format type definitions
// Defines the contract for parsing legacy .planning directories into typed structures.
// Zero Pi dependencies — pure type definitions only.

// ─── Validation ────────────────────────────────────────────────────────────

/** Severity of a pre-flight validation issue: 'fatal' blocks migration, 'warning' does not. */
export type ValidationSeverity = 'fatal' | 'warning';

/** A single problem found while validating a .planning directory. */
export interface ValidationIssue {
  /** File or directory the issue refers to */
  file: string;
  /** 'fatal' blocks the pipeline; 'warning' allows it to proceed with reduced data */
  severity: ValidationSeverity;
  /** Human-readable description of the problem */
  message: string;
}

/** Aggregate validation outcome; valid is false only when a fatal issue exists. */
export interface ValidationResult {
  valid: boolean;
  issues: ValidationIssue[];
}

// ─── Top-Level Container ───────────────────────────────────────────────────

/** Everything parsed out of a legacy .planning directory — input to the transformer. */
export interface PlanningProject {
  /** Absolute path to the .planning directory */
  path: string;
  /** Parsed PROJECT.md content, null if missing */
  project: string | null;
  /** Parsed ROADMAP.md */
  roadmap: PlanningRoadmap | null;
  /** Parsed REQUIREMENTS.md entries */
  requirements: PlanningRequirement[];
  /** Parsed STATE.md */
  state: PlanningState | null;
  /** Parsed config.json */
  config: PlanningConfig | null;
  /** Phase directories keyed by full directory name (e.g. "29-auth-system") */
  phases: Record<string, PlanningPhase>;
  /** Quick tasks from quick/ directory */
  quickTasks: PlanningQuickTask[];
  /** Milestone-level data from milestones/ directory */
  milestones: PlanningMilestone[];
  /** Research files from top-level research/ directory */
  research: PlanningResearch[];
  /** Validation result from pre-flight checks */
  validation: ValidationResult;
}

// ─── Roadmap ───────────────────────────────────────────────────────────────

/** Parsed ROADMAP.md. Either `milestones` or `phases` is populated, depending on the roadmap format. */
export interface PlanningRoadmap {
  /** Raw content for reference */
  raw: string;
  /** Milestone sections (for milestone-sectioned roadmaps) */
  milestones: PlanningRoadmapMilestone[];
  /** Flat phase entries (for simple flat roadmaps) */
  phases: PlanningRoadmapEntry[];
}

/** One milestone section within a milestone-sectioned roadmap. */
export interface PlanningRoadmapMilestone {
  /** Milestone identifier (e.g. "v2.5") */
  id: string;
  /** Milestone title */
  title: string;
  /** Whether the milestone section is collapsed (inside <details>) */
  collapsed: boolean;
  /** Phase entries within this milestone */
  phases: PlanningRoadmapEntry[];
}

/** One checkbox line in a roadmap listing a phase. */
export interface PlanningRoadmapEntry {
  /** Phase number (may be decimal, e.g. 2.5) */
  number: number;
  /** Phase title/slug */
  title: string;
  /** Whether the phase checkbox is checked */
  done: boolean;
  /** Raw line text for reference */
  raw: string;
}

// ─── Phase ─────────────────────────────────────────────────────────────────

/** A parsed phase directory (plans, summaries, research, and any extras). */
export interface PlanningPhase {
  /** Full directory name (e.g. "29-auth-system") */
  dirName: string;
  /** Extracted phase number */
  number: number;
  /** Extracted phase slug */
  slug: string;
  /** Plan files keyed by plan number (e.g. "01") */
  plans: Record<string, PlanningPlan>;
  /** Summary files keyed by plan number (e.g. "01"), includes orphans */
  summaries: Record<string, PlanningSummary>;
  /** Research files in phase directory */
  research: PlanningResearch[];
  /** Verification files */
  verifications: PlanningPhaseFile[];
  /** Non-standard extra files */
  extraFiles: PlanningPhaseFile[];
}

// ─── Plan (XML-in-Markdown) ────────────────────────────────────────────────

/** A parsed legacy plan file: YAML frontmatter plus XML-tagged markdown sections. */
export interface PlanningPlan {
  /** File name (e.g. "29-01-PLAN.md") */
  fileName: string;
  /** Plan number within phase (e.g. "01") */
  planNumber: string;
  /** Parsed YAML frontmatter */
  frontmatter: PlanningPlanFrontmatter;
  /** Extracted <objective> content */
  objective: string;
  /** Extracted <tasks> with individual <task> entries */
  tasks: string[];
  /** Extracted <context> content */
  context: string;
  /** Extracted <verification> content */
  verification: string;
  /** Extracted <success_criteria> content */
  successCriteria: string;
  /** Raw content for reference */
  raw: string;
}

/** YAML frontmatter fields of a legacy plan file. Snake_case keys mirror the on-disk format. */
export interface PlanningPlanFrontmatter {
  /** Phase identifier string */
  phase: string;
  /** Plan identifier string */
  plan: string;
  /** Plan type label */
  type: string;
  /** Execution wave number, null when unspecified */
  wave: number | null;
  /** IDs of plans this plan depends on */
  depends_on: string[];
  /** Files this plan expects to modify */
  files_modified: string[];
  /** Whether the plan was marked autonomous */
  autonomous: boolean;
  /** Must-have contract block, null when absent */
  must_haves: PlanningPlanMustHaves | null;
}

/** The must_haves contract block of a plan's frontmatter. */
export interface PlanningPlanMustHaves {
  truths: string[];
  artifacts: string[];
  key_links: string[];
}

// ─── Summary (YAML Frontmatter) ────────────────────────────────────────────

/** A parsed legacy summary file: YAML frontmatter plus a markdown body. */
export interface PlanningSummary {
  /** File name (e.g. "29-01-SUMMARY.md") */
  fileName: string;
  /** Plan number within phase (e.g. "01") */
  planNumber: string;
  /** Parsed YAML frontmatter */
  frontmatter: PlanningSummaryFrontmatter;
  /** Body content (after frontmatter) */
  body: string;
  /** Raw content for reference */
  raw: string;
}

/** YAML frontmatter fields of a legacy summary. Quoted keys contain hyphens on disk. */
export interface PlanningSummaryFrontmatter {
  phase: string;
  plan: string;
  subsystem: string;
  tags: string[];
  /** Cross-phase dependencies declared by this summary */
  requires: PlanningSummaryRequires[];
  provides: string[];
  affects: string[];
  'tech-stack': string[];
  'key-files': string[];
  'key-decisions': string[];
  'patterns-established': string[];
  /** Free-form duration string (e.g. "2h") */
  duration: string;
  /** Completion date string */
  completed: string;
}

/** One cross-phase dependency entry from a summary's `requires` list. */
export interface PlanningSummaryRequires {
  phase: string;
  provides: string;
}

// ─── Requirements ──────────────────────────────────────────────────────────

/** One requirement parsed from REQUIREMENTS.md. */
export interface PlanningRequirement {
  /** Requirement ID (e.g. "R001"); may be empty when not stated in the source */
  id: string;
  /** Requirement title */
  title: string;
  /** Status (active, validated, deferred, etc.) — free-form, normalized by the transformer */
  status: string;
  /** Description text */
  description: string;
  /** Raw section content */
  raw: string;
}

// ─── Research ──────────────────────────────────────────────────────────────

/** A research file (top-level or phase-local). */
export interface PlanningResearch {
  /** File name */
  fileName: string;
  /** Raw content */
  content: string;
}

// ─── Config ────────────────────────────────────────────────────────────────

/** Parsed config.json; arbitrary extra fields are preserved via the index signature. */
export interface PlanningConfig {
  /** Project name from config */
  projectName: string;
  /** Any other config fields */
  [key: string]: unknown;
}

// ─── Quick Tasks ───────────────────────────────────────────────────────────

/** One quick-task directory from quick/. */
export interface PlanningQuickTask {
  /** Directory name (e.g. "001-fix-login") */
  dirName: string;
  /** Task number */
  number: number;
  /** Task slug */
  slug: string;
  /** Plan file content, null if missing */
  plan: string | null;
  /** Summary file content, null if missing */
  summary: string | null;
}

// ─── Milestones ────────────────────────────────────────────────────────────

/** Milestone-level data from a milestones/ subdirectory. */
export interface PlanningMilestone {
  /** Directory or file identifier (e.g. "v2.2") */
  id: string;
  /** Requirements file content, null if missing */
  requirements: string | null;
  /** Roadmap file content, null if missing */
  roadmap: string | null;
  /** Any other files */
  extraFiles: PlanningPhaseFile[];
}

// ─── State ─────────────────────────────────────────────────────────────────

/** Parsed STATE.md. */
export interface PlanningState {
  /** Raw content */
  raw: string;
  /** Extracted current phase */
  currentPhase: string | null;
  /** Extracted status */
  status: string | null;
}

// ─── Generic File Reference ────────────────────────────────────────────────

/** A file name plus its raw content — used for verification and extra files. */
export interface PlanningPhaseFile {
  /** File name */
  fileName: string;
  /** Raw content */
  content: string;
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// GSD Output Types — produced by transformer, consumed by writer (S03)
// Mirror GSD-2 runtime shapes so deriveState() works on migrated output.
// ═══════════════════════════════════════════════════════════════════════════

/** Fully transformed project — the writer's sole input. */
export interface GSDProject {
  milestones: GSDMilestone[];
  /** Raw PROJECT.md text (pass through from old format) */
  projectContent: string;
  requirements: GSDRequirement[];
  /** Empty or pass-through from old project key decisions */
  decisionsContent: string;
}

/** One GSD-2 milestone; serialized to ROADMAP.md by the writer. */
export interface GSDMilestone {
  /** e.g. "M001", "M002" */
  id: string;
  /** From old milestone section title or roadmap H1 */
  title: string;
  /** Derived from PROJECT.md description or roadmap H1 */
  vision: string;
  /** Empty [] if none found */
  successCriteria: string[];
  slices: GSDSlice[];
  /** Consolidated research blob, null if no research */
  research: string | null;
  /** Empty [] — old format has no boundary map equivalent */
  boundaryMap: GSDBoundaryEntry[];
}

/** One GSD-2 slice (mapped from a legacy phase). */
export interface GSDSlice {
  /** e.g. "S01", "S02" */
  id: string;
  /** Titlecased from phase slug */
  title: string;
  /** Default 'medium' */
  risk: 'low' | 'medium' | 'high';
  /** [prev slice ID] for sequential, [] for S01 */
  depends: string[];
  /** From roadmap checkbox */
  done: boolean;
  /** Derived from first plan objective or defaulted */
  demo: string;
  /** Same as demo or phase slug */
  goal: string;
  tasks: GSDTask[];
  /** Per-phase research content, null if none */
  research: string | null;
  /** Only populated if done */
  summary: GSDSliceSummaryData | null;
}

/** One GSD-2 task (mapped from a legacy plan file). */
export interface GSDTask {
  /** e.g. "T01", "T02" */
  id: string;
  /** From plan frontmatter or phase slug + plan number */
  title: string;
  /** From plan objective */
  description: string;
  /** Summary exists for this plan number */
  done: boolean;
  /** From summary duration if available, else '' */
  estimate: string;
  /** From plan frontmatter files_modified */
  files: string[];
  /** From plan frontmatter must_haves.truths */
  mustHaves: string[];
  /** Only populated if done */
  summary: GSDTaskSummaryData | null;
}

/** One GSD-2 requirement; serialized to REQUIREMENTS.md by the writer. */
export interface GSDRequirement {
  /** e.g. "R001" */
  id: string;
  title: string;
  /** Default 'core-capability' */
  class: string;
  /** 'active' | 'validated' | 'deferred' */
  status: string;
  description: string;
  /** Default 'inferred' */
  source: string;
  /** Default 'none yet' */
  primarySlice: string;
}

/** Summary data for a completed slice, used to write S##-SUMMARY.md. */
export interface GSDSliceSummaryData {
  /** From last plan summary's completed field */
  completedAt: string;
  provides: string[];
  keyFiles: string[];
  keyDecisions: string[];
  patternsEstablished: string[];
  /** Free-form duration string, '' when unknown */
  duration: string;
  /** From summary body */
  whatHappened: string;
}

/** Summary data for a completed task, used to write T##-SUMMARY.md. */
export interface GSDTaskSummaryData {
  /** Completion date string, '' when unknown */
  completedAt: string;
  provides: string[];
  keyFiles: string[];
  /** Free-form duration string, '' when unknown */
  duration: string;
  /** From summary body */
  whatHappened: string;
}

/** One entry of a milestone boundary map. Always empty for migrated projects. */
export interface GSDBoundaryEntry {
  fromSlice: string;
  toSlice: string;
  produces: string;
  consumes: string;
}
|
||||
53
src/resources/extensions/gsd/migrate/validator.ts
Normal file
53
src/resources/extensions/gsd/migrate/validator.ts
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
// Old .planning directory validator
|
||||
// Pre-flight checks for minimum viable .planning directory.
|
||||
// Pure functions, zero Pi dependencies — uses only Node built-ins + exported helpers.
|
||||
|
||||
import { existsSync, statSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
import type { ValidationResult, ValidationIssue, ValidationSeverity } from './types.ts';
|
||||
|
||||
function issue(file: string, severity: ValidationSeverity, message: string): ValidationIssue {
|
||||
return { file, severity, message };
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a .planning directory has the minimum required structure.
|
||||
* Returns structured issues with severity levels:
|
||||
* - fatal: directory doesn't exist or ROADMAP.md missing (migration cannot proceed)
|
||||
* - warning: optional files missing (migration can proceed with reduced data)
|
||||
*/
|
||||
export async function validatePlanningDirectory(path: string): Promise<ValidationResult> {
|
||||
const issues: ValidationIssue[] = [];
|
||||
|
||||
// Check directory exists
|
||||
if (!existsSync(path) || !statSync(path).isDirectory()) {
|
||||
issues.push(issue(path, 'fatal', 'Directory does not exist'));
|
||||
return { valid: false, issues };
|
||||
}
|
||||
|
||||
// ROADMAP.md is required (fatal if missing)
|
||||
if (!existsSync(join(path, 'ROADMAP.md'))) {
|
||||
issues.push(issue('ROADMAP.md', 'fatal', 'ROADMAP.md is required for migration'));
|
||||
}
|
||||
|
||||
// Optional files — warn if missing
|
||||
if (!existsSync(join(path, 'PROJECT.md'))) {
|
||||
issues.push(issue('PROJECT.md', 'warning', 'PROJECT.md not found — project metadata will be empty'));
|
||||
}
|
||||
|
||||
if (!existsSync(join(path, 'REQUIREMENTS.md'))) {
|
||||
issues.push(issue('REQUIREMENTS.md', 'warning', 'REQUIREMENTS.md not found — requirements will be empty'));
|
||||
}
|
||||
|
||||
if (!existsSync(join(path, 'STATE.md'))) {
|
||||
issues.push(issue('STATE.md', 'warning', 'STATE.md not found — state information will be empty'));
|
||||
}
|
||||
|
||||
if (!existsSync(join(path, 'phases')) || !statSync(join(path, 'phases')).isDirectory()) {
|
||||
issues.push(issue('phases/', 'warning', 'phases/ directory not found — no phase data will be parsed'));
|
||||
}
|
||||
|
||||
const hasFatal = issues.some(i => i.severity === 'fatal');
|
||||
return { valid: !hasFatal, issues };
|
||||
}
|
||||
539
src/resources/extensions/gsd/migrate/writer.ts
Normal file
539
src/resources/extensions/gsd/migrate/writer.ts
Normal file
|
|
@ -0,0 +1,539 @@
|
|||
// GSD Directory Writer — Format Functions & Directory Orchestrator
|
||||
// Format functions: pure string-returning functions that serialize GSD types into the exact markdown
|
||||
// format that GSD-2's parsers expect (parseRoadmap, parsePlan, parseSummary, parseRequirementCounts).
|
||||
// writeGSDDirectory: orchestrator that writes a complete .gsd directory tree from a GSDProject.
|
||||
|
||||
import { join } from 'node:path';
|
||||
import { saveFile } from '../files.ts';
|
||||
|
||||
import type {
|
||||
GSDMilestone,
|
||||
GSDSlice,
|
||||
GSDTask,
|
||||
GSDRequirement,
|
||||
GSDProject,
|
||||
} from './types.ts';
|
||||
|
||||
// ─── Types ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Result of writeGSDDirectory — lists all files that were written. */
export interface WrittenFiles {
  /** Absolute paths of all files written */
  paths: string[];
  /** Count by category */
  counts: {
    /** Milestone ROADMAP.md files */
    roadmaps: number;
    /** Slice S##-PLAN.md files */
    plans: number;
    /** Task T##-PLAN.md files */
    taskPlans: number;
    /** Task T##-SUMMARY.md files */
    taskSummaries: number;
    /** Slice S##-SUMMARY.md files */
    sliceSummaries: number;
    /** Research files */
    research: number;
    /** REQUIREMENTS.md files */
    requirements: number;
    /** Milestone CONTEXT.md files */
    contexts: number;
    /** Anything not covered above (PROJECT.md, DECISIONS.md, STATE.md, …) */
    other: number;
  };
}

/** Pre-write statistics computed from a GSDProject without I/O. */
export interface MigrationPreview {
  milestoneCount: number;
  totalSlices: number;
  totalTasks: number;
  doneSlices: number;
  doneTasks: number;
  /** Completion percentages — NOTE(review): presumably 0–100 integers; confirm against generatePreview */
  sliceCompletionPct: number;
  taskCompletionPct: number;
  /** Requirement counts bucketed by normalized status */
  requirements: {
    active: number;
    validated: number;
    deferred: number;
    outOfScope: number;
    total: number;
  };
}
|
||||
|
||||
// ─── Local Helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Serialize a flat key-value map into YAML frontmatter block.
|
||||
* Matches parseFrontmatterMap() expectations:
|
||||
* - Scalars: `key: value`
|
||||
* - Arrays of strings: `key:\n - item`
|
||||
* - Empty arrays: `key: []`
|
||||
* - Arrays of objects: `key:\n - field1: val\n field2: val`
|
||||
* - Boolean: `key: true/false`
|
||||
*/
|
||||
function serializeFrontmatter(data: Record<string, unknown>): string {
|
||||
const lines: string[] = ['---'];
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (value === undefined || value === null) continue;
|
||||
|
||||
if (typeof value === 'boolean') {
|
||||
lines.push(`${key}: ${value}`);
|
||||
} else if (typeof value === 'string' || typeof value === 'number') {
|
||||
lines.push(`${key}: ${value}`);
|
||||
} else if (Array.isArray(value)) {
|
||||
if (value.length === 0) {
|
||||
lines.push(`${key}: []`);
|
||||
} else if (typeof value[0] === 'object' && value[0] !== null) {
|
||||
// Array of objects
|
||||
lines.push(`${key}:`);
|
||||
for (const obj of value) {
|
||||
const entries = Object.entries(obj as Record<string, string>);
|
||||
if (entries.length > 0) {
|
||||
lines.push(` - ${entries[0][0]}: ${entries[0][1]}`);
|
||||
for (let i = 1; i < entries.length; i++) {
|
||||
lines.push(` ${entries[i][0]}: ${entries[i][1]}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Array of scalars
|
||||
lines.push(`${key}:`);
|
||||
for (const item of value) {
|
||||
lines.push(` - ${item}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lines.push('---');
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
// ─── Format Functions ──────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Format a milestone's ROADMAP.md content.
|
||||
* Output must parse correctly through parseRoadmap().
|
||||
*/
|
||||
export function formatRoadmap(milestone: GSDMilestone): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`# ${milestone.id}: ${milestone.title}`);
|
||||
lines.push('');
|
||||
lines.push(`**Vision:** ${milestone.vision || '(migrated project)'}`);
|
||||
lines.push('');
|
||||
|
||||
lines.push('## Success Criteria');
|
||||
lines.push('');
|
||||
if (milestone.successCriteria.length > 0) {
|
||||
for (const criterion of milestone.successCriteria) {
|
||||
lines.push(`- ${criterion}`);
|
||||
}
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
lines.push('## Slices');
|
||||
lines.push('');
|
||||
for (const slice of milestone.slices) {
|
||||
const check = slice.done ? 'x' : ' ';
|
||||
const depsStr = slice.depends.length > 0 ? slice.depends.join(', ') : '';
|
||||
lines.push(`- [${check}] **${slice.id}: ${slice.title}** \`risk:${slice.risk}\` \`depends:[${depsStr}]\``);
|
||||
if (slice.demo) {
|
||||
lines.push(` > After this: ${slice.demo}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Skip Boundary Map section entirely per D004
|
||||
|
||||
return lines.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a slice's PLAN.md (S01-PLAN.md).
|
||||
* Output must parse correctly through parsePlan().
|
||||
*/
|
||||
export function formatPlan(slice: GSDSlice): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`# ${slice.id}: ${slice.title}`);
|
||||
lines.push('');
|
||||
lines.push(`**Goal:** ${slice.goal || slice.title}`);
|
||||
lines.push(`**Demo:** ${slice.demo || slice.title}`);
|
||||
lines.push('');
|
||||
|
||||
lines.push('## Must-Haves');
|
||||
lines.push('');
|
||||
// No must-haves in migrated data — empty section
|
||||
lines.push('');
|
||||
|
||||
lines.push('## Tasks');
|
||||
lines.push('');
|
||||
for (const task of slice.tasks) {
|
||||
const check = task.done ? 'x' : ' ';
|
||||
const estPart = task.estimate ? ` \`est:${task.estimate}\`` : '';
|
||||
lines.push(`- [${check}] **${task.id}: ${task.title}**${estPart}`);
|
||||
if (task.description) {
|
||||
lines.push(` - ${task.description}`);
|
||||
}
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
lines.push('## Files Likely Touched');
|
||||
lines.push('');
|
||||
for (const task of slice.tasks) {
|
||||
for (const file of task.files) {
|
||||
lines.push(`- \`${file}\``);
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a slice summary (S01-SUMMARY.md).
|
||||
* Output must parse correctly through parseSummary().
|
||||
*/
|
||||
export function formatSliceSummary(slice: GSDSlice, milestoneId: string): string {
|
||||
if (!slice.summary) return '';
|
||||
|
||||
const s = slice.summary;
|
||||
const fm = serializeFrontmatter({
|
||||
id: slice.id,
|
||||
parent: milestoneId,
|
||||
milestone: milestoneId,
|
||||
provides: s.provides,
|
||||
requires: [],
|
||||
affects: [],
|
||||
key_files: s.keyFiles,
|
||||
key_decisions: s.keyDecisions,
|
||||
patterns_established: s.patternsEstablished,
|
||||
observability_surfaces: [],
|
||||
drill_down_paths: [],
|
||||
duration: s.duration || '',
|
||||
verification_result: 'passed',
|
||||
completed_at: s.completedAt || '',
|
||||
blocker_discovered: false,
|
||||
});
|
||||
|
||||
const body = [
|
||||
'',
|
||||
`# ${slice.id}: ${slice.title}`,
|
||||
'',
|
||||
`**${s.whatHappened ? s.whatHappened.split('\n')[0] : 'Migrated from legacy format'}**`,
|
||||
'',
|
||||
'## What Happened',
|
||||
'',
|
||||
s.whatHappened || 'Migrated from legacy planning format.',
|
||||
];
|
||||
|
||||
return fm + body.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a task summary (T01-SUMMARY.md).
|
||||
* Output must parse correctly through parseSummary().
|
||||
*/
|
||||
export function formatTaskSummary(task: GSDTask, sliceId: string, milestoneId: string): string {
|
||||
if (!task.summary) return '';
|
||||
|
||||
const s = task.summary;
|
||||
const fm = serializeFrontmatter({
|
||||
id: task.id,
|
||||
parent: sliceId,
|
||||
milestone: milestoneId,
|
||||
provides: s.provides,
|
||||
requires: [],
|
||||
affects: [],
|
||||
key_files: s.keyFiles,
|
||||
key_decisions: [],
|
||||
patterns_established: [],
|
||||
observability_surfaces: [],
|
||||
drill_down_paths: [],
|
||||
duration: s.duration || '',
|
||||
verification_result: 'passed',
|
||||
completed_at: s.completedAt || '',
|
||||
blocker_discovered: false,
|
||||
});
|
||||
|
||||
const body = [
|
||||
'',
|
||||
`# ${task.id}: ${task.title}`,
|
||||
'',
|
||||
`**${s.whatHappened ? s.whatHappened.split('\n')[0] : 'Migrated from legacy format'}**`,
|
||||
'',
|
||||
'## What Happened',
|
||||
'',
|
||||
s.whatHappened || 'Migrated from legacy planning format.',
|
||||
];
|
||||
|
||||
return fm + body.join('\n') + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a task plan (T01-PLAN.md).
|
||||
* deriveState() only checks for file existence, not content.
|
||||
* Keep it minimal but valid markdown.
|
||||
*/
|
||||
export function formatTaskPlan(task: GSDTask, sliceId: string, milestoneId: string): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`# ${task.id}: ${task.title}`);
|
||||
lines.push('');
|
||||
lines.push(`**Slice:** ${sliceId} — **Milestone:** ${milestoneId}`);
|
||||
lines.push('');
|
||||
lines.push('## Description');
|
||||
lines.push('');
|
||||
lines.push(task.description || 'Migrated from legacy planning format.');
|
||||
lines.push('');
|
||||
|
||||
if (task.mustHaves.length > 0) {
|
||||
lines.push('## Must-Haves');
|
||||
lines.push('');
|
||||
for (const mh of task.mustHaves) {
|
||||
lines.push(`- [ ] ${mh}`);
|
||||
}
|
||||
lines.push('');
|
||||
}
|
||||
|
||||
if (task.files.length > 0) {
|
||||
lines.push('## Files');
|
||||
lines.push('');
|
||||
for (const f of task.files) {
|
||||
lines.push(`- \`${f}\``);
|
||||
}
|
||||
lines.push('');
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Format REQUIREMENTS.md grouped by status.
|
||||
* Output must parse correctly through parseRequirementCounts().
|
||||
* parseRequirementCounts expects: ## Active/## Validated/## Deferred/## Out of Scope sections
|
||||
* with ### R001 — Title headings under each section.
|
||||
*/
|
||||
export function formatRequirements(requirements: GSDRequirement[]): string {
|
||||
const lines: string[] = [];
|
||||
lines.push('# Requirements');
|
||||
lines.push('');
|
||||
|
||||
const groups: Record<string, GSDRequirement[]> = {
|
||||
active: [],
|
||||
validated: [],
|
||||
deferred: [],
|
||||
'out-of-scope': [],
|
||||
};
|
||||
|
||||
for (const req of requirements) {
|
||||
const status = req.status.toLowerCase();
|
||||
if (status in groups) {
|
||||
groups[status].push(req);
|
||||
} else {
|
||||
groups.active.push(req);
|
||||
}
|
||||
}
|
||||
|
||||
const sectionMap: [string, string][] = [
|
||||
['active', 'Active'],
|
||||
['validated', 'Validated'],
|
||||
['deferred', 'Deferred'],
|
||||
['out-of-scope', 'Out of Scope'],
|
||||
];
|
||||
|
||||
for (const [key, heading] of sectionMap) {
|
||||
lines.push(`## ${heading}`);
|
||||
lines.push('');
|
||||
for (const req of groups[key]) {
|
||||
lines.push(`### ${req.id} — ${req.title}`);
|
||||
lines.push('');
|
||||
lines.push(`- Status: ${req.status}`);
|
||||
lines.push(`- Class: ${req.class}`);
|
||||
lines.push(`- Source: ${req.source}`);
|
||||
lines.push(`- Primary Slice: ${req.primarySlice}`);
|
||||
lines.push('');
|
||||
if (req.description) {
|
||||
lines.push(req.description);
|
||||
lines.push('');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
// ─── Passthrough Format Helpers ────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Format PROJECT.md content.
|
||||
* If content is empty, produce a minimal valid stub.
|
||||
*/
|
||||
export function formatProject(content: string): string {
|
||||
if (!content || !content.trim()) {
|
||||
return '# Project\n\n(Migrated project — no description available.)\n';
|
||||
}
|
||||
return content.endsWith('\n') ? content : content + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format DECISIONS.md content.
|
||||
* If content is empty, produce the standard header.
|
||||
*/
|
||||
export function formatDecisions(content: string): string {
|
||||
if (!content || !content.trim()) {
|
||||
return '# Decisions\n\n<!-- Append-only register of architectural and pattern decisions -->\n\n| ID | Decision | Rationale | Date |\n|----|----------|-----------|------|\n';
|
||||
}
|
||||
return content.endsWith('\n') ? content : content + '\n';
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a milestone CONTEXT.md.
|
||||
* Minimal context with no depends — migrated milestones have no upstream dependencies.
|
||||
*/
|
||||
export function formatContext(milestoneId: string): string {
|
||||
return `# ${milestoneId} Context\n\nMigrated milestone — no upstream dependencies.\n`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format STATE.md.
|
||||
* deriveState() does not read STATE.md — it recomputes from scratch.
|
||||
* Write a minimal stub that will be overwritten on first /gsd status.
|
||||
*/
|
||||
export function formatState(milestones: GSDMilestone[]): string {
|
||||
const lines: string[] = [];
|
||||
lines.push('# GSD State');
|
||||
lines.push('');
|
||||
lines.push('<!-- Auto-generated. Updated by deriveState(). -->');
|
||||
lines.push('');
|
||||
for (const m of milestones) {
|
||||
const doneSlices = m.slices.filter(s => s.done).length;
|
||||
const totalSlices = m.slices.length;
|
||||
lines.push(`## ${m.id}: ${m.title}`);
|
||||
lines.push('');
|
||||
lines.push(`- Slices: ${doneSlices}/${totalSlices}`);
|
||||
lines.push('');
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
// ─── Directory Writer Orchestrator ─────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Write a complete .gsd directory tree from a GSDProject.
|
||||
* Iterates milestones → slices → tasks, calls format functions,
|
||||
* and writes each file via saveFile(). Returns a manifest of written paths.
|
||||
*
|
||||
* Skips research/summary files when null (does not write empty stubs).
|
||||
*/
|
||||
export async function writeGSDDirectory(
|
||||
project: GSDProject,
|
||||
targetPath: string,
|
||||
): Promise<WrittenFiles> {
|
||||
const gsdDir = join(targetPath, '.gsd');
|
||||
const milestonesBase = join(gsdDir, 'milestones');
|
||||
const paths: string[] = [];
|
||||
const counts: WrittenFiles['counts'] = {
|
||||
roadmaps: 0,
|
||||
plans: 0,
|
||||
taskPlans: 0,
|
||||
taskSummaries: 0,
|
||||
sliceSummaries: 0,
|
||||
research: 0,
|
||||
requirements: 0,
|
||||
contexts: 0,
|
||||
other: 0,
|
||||
};
|
||||
|
||||
// Root-level files
|
||||
const projectPath = join(gsdDir, 'PROJECT.md');
|
||||
await saveFile(projectPath, formatProject(project.projectContent));
|
||||
paths.push(projectPath);
|
||||
counts.other++;
|
||||
|
||||
const decisionsPath = join(gsdDir, 'DECISIONS.md');
|
||||
await saveFile(decisionsPath, formatDecisions(project.decisionsContent));
|
||||
paths.push(decisionsPath);
|
||||
counts.other++;
|
||||
|
||||
const statePath = join(gsdDir, 'STATE.md');
|
||||
await saveFile(statePath, formatState(project.milestones));
|
||||
paths.push(statePath);
|
||||
counts.other++;
|
||||
|
||||
if (project.requirements.length > 0) {
|
||||
const reqPath = join(gsdDir, 'REQUIREMENTS.md');
|
||||
await saveFile(reqPath, formatRequirements(project.requirements));
|
||||
paths.push(reqPath);
|
||||
counts.requirements++;
|
||||
}
|
||||
|
||||
// Milestones
|
||||
for (const milestone of project.milestones) {
|
||||
const mDir = join(milestonesBase, milestone.id);
|
||||
|
||||
// Roadmap (always written, even for empty milestones)
|
||||
const roadmapPath = join(mDir, `${milestone.id}-ROADMAP.md`);
|
||||
await saveFile(roadmapPath, formatRoadmap(milestone));
|
||||
paths.push(roadmapPath);
|
||||
counts.roadmaps++;
|
||||
|
||||
// Context
|
||||
const contextPath = join(mDir, `${milestone.id}-CONTEXT.md`);
|
||||
await saveFile(contextPath, formatContext(milestone.id));
|
||||
paths.push(contextPath);
|
||||
counts.contexts++;
|
||||
|
||||
// Research (skip if null)
|
||||
if (milestone.research !== null) {
|
||||
const researchPath = join(mDir, `${milestone.id}-RESEARCH.md`);
|
||||
await saveFile(researchPath, milestone.research);
|
||||
paths.push(researchPath);
|
||||
counts.research++;
|
||||
}
|
||||
|
||||
// Slices
|
||||
for (const slice of milestone.slices) {
|
||||
const sDir = join(mDir, 'slices', slice.id);
|
||||
const tasksDir = join(sDir, 'tasks');
|
||||
|
||||
// Slice plan
|
||||
const planPath = join(sDir, `${slice.id}-PLAN.md`);
|
||||
await saveFile(planPath, formatPlan(slice));
|
||||
paths.push(planPath);
|
||||
counts.plans++;
|
||||
|
||||
// Slice research (skip if null)
|
||||
if (slice.research !== null) {
|
||||
const sliceResearchPath = join(sDir, `${slice.id}-RESEARCH.md`);
|
||||
await saveFile(sliceResearchPath, slice.research);
|
||||
paths.push(sliceResearchPath);
|
||||
counts.research++;
|
||||
}
|
||||
|
||||
// Slice summary (skip if null)
|
||||
if (slice.summary !== null) {
|
||||
const summaryContent = formatSliceSummary(slice, milestone.id);
|
||||
if (summaryContent) {
|
||||
const summaryPath = join(sDir, `${slice.id}-SUMMARY.md`);
|
||||
await saveFile(summaryPath, summaryContent);
|
||||
paths.push(summaryPath);
|
||||
counts.sliceSummaries++;
|
||||
}
|
||||
}
|
||||
|
||||
// Tasks
|
||||
for (const task of slice.tasks) {
|
||||
// Task plan (always written)
|
||||
const taskPlanPath = join(tasksDir, `${task.id}-PLAN.md`);
|
||||
await saveFile(taskPlanPath, formatTaskPlan(task, slice.id, milestone.id));
|
||||
paths.push(taskPlanPath);
|
||||
counts.taskPlans++;
|
||||
|
||||
// Task summary (skip if null)
|
||||
if (task.summary !== null) {
|
||||
const taskSummaryContent = formatTaskSummary(task, slice.id, milestone.id);
|
||||
if (taskSummaryContent) {
|
||||
const taskSummaryPath = join(tasksDir, `${task.id}-SUMMARY.md`);
|
||||
await saveFile(taskSummaryPath, taskSummaryContent);
|
||||
paths.push(taskSummaryPath);
|
||||
counts.taskSummaries++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { paths, counts };
|
||||
}
|
||||
66
src/resources/extensions/gsd/prompts/review-migration.md
Normal file
66
src/resources/extensions/gsd/prompts/review-migration.md
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
## Review Migrated .gsd Directory
|
||||
|
||||
A `/gsd migrate` command just wrote a `.gsd/` directory from an old `.planning` source. Your job is to audit the output and verify it meets GSD-2 standards before the user starts working with it.
|
||||
|
||||
### Source
|
||||
- Old `.planning` directory: `{{sourcePath}}`
|
||||
- Written `.gsd` directory: `{{gsdPath}}`
|
||||
|
||||
### Migration Stats
|
||||
{{previewStats}}
|
||||
|
||||
### Review Checklist
|
||||
|
||||
Work through each check. Report PASS/FAIL with specifics. Fix anything fixable in-place.
|
||||
|
||||
#### 1. Structure Validation
|
||||
- Run `deriveState()` on the `.gsd` directory (import from `state.ts`, pass the **project root** as basePath)
|
||||
- Confirm it returns a coherent phase (not `pre-planning` unless the project is truly empty)
|
||||
- Confirm activeMilestone, activeSlice, activeTask are sensible for the project's completion state
|
||||
- Confirm progress counts match the migration preview stats
|
||||
|
||||
#### 2. Roadmap Quality
|
||||
- Read `M001-ROADMAP.md` (and any other milestone roadmaps)
|
||||
- Confirm slice entries have meaningful titles (not file paths or garbled text)
|
||||
- Confirm `[x]`/`[ ]` completion markers are correct relative to the old roadmap
|
||||
- Confirm vision statement is present and meaningful (not empty or "Migration")
|
||||
|
||||
#### 3. Content Spot-Check
|
||||
- Pick 2-3 slices with the most tasks and read their plan files
|
||||
- Confirm task titles and descriptions carry over meaningfully from the old plans
|
||||
- Confirm summary files exist for completed tasks and contain relevant content
|
||||
- Check that research files (if present) contain consolidated content, not empty stubs
|
||||
|
||||
#### 4. Requirements (if any)
|
||||
- Read REQUIREMENTS.md
|
||||
- Confirm requirement IDs are present and non-duplicate
|
||||
- Confirm statuses make sense: completed old requirements should be `validated`, in-progress should be `active`
|
||||
|
||||
#### 5. PROJECT.md
|
||||
- Read the written PROJECT.md
|
||||
- Confirm it contains the old project's description, not boilerplate
|
||||
- Confirm it reads like a useful project summary
|
||||
|
||||
#### 6. Decisions
|
||||
- If DECISIONS.md was written, confirm it contains extracted decisions from old summaries (or is empty if no decisions existed)
|
||||
|
||||
### Output Format
|
||||
|
||||
Summarize your findings as:
|
||||
|
||||
```
|
||||
Migration Review: <project name>
|
||||
================================
|
||||
Structure: PASS/FAIL — <details>
|
||||
Roadmap: PASS/FAIL — <details>
|
||||
Content: PASS/FAIL — <details>
|
||||
Requirements: PASS/FAIL/SKIP — <details>
|
||||
Project: PASS/FAIL — <details>
|
||||
Decisions: PASS/FAIL/SKIP — <details>
|
||||
|
||||
Overall: PASS / PASS WITH NOTES / FAIL
|
||||
Issues: <list any problems found>
|
||||
Fixes applied: <list any in-place fixes made>
|
||||
```
|
||||
|
||||
If the overall result is FAIL, explain what needs manual attention. If PASS WITH NOTES, explain what's imperfect but acceptable. If PASS, confirm the `.gsd` directory is ready for GSD-2 auto-mode.
|
||||
390
src/resources/extensions/gsd/tests/migrate-command.test.ts
Normal file
390
src/resources/extensions/gsd/tests/migrate-command.test.ts
Normal file
|
|
@ -0,0 +1,390 @@
|
|||
// Migration command integration test
|
||||
// Tests the pipeline functions as the command handler uses them:
|
||||
// path resolution, validation gating, full parse→transform→preview→write→deriveState round-trip.
|
||||
// Exercises pipeline modules directly — no TUI context dependency.
|
||||
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync, existsSync } from 'node:fs';
|
||||
import { join, resolve } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import {
|
||||
validatePlanningDirectory,
|
||||
parsePlanningDirectory,
|
||||
transformToGSD,
|
||||
generatePreview,
|
||||
writeGSDDirectory,
|
||||
} from '../migrate/index.ts';
|
||||
import { deriveState } from '../state.ts';
|
||||
|
||||
// Suite-wide tallies updated by assert()/assertEq(); reported (and used for
// the exit code) at the end of main().
let passed = 0;
let failed = 0;
|
||||
|
||||
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq<T>(actual: T, expected: T, message: string): void {
|
||||
if (JSON.stringify(actual) === JSON.stringify(expected)) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message} — expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Fixture Helpers ───────────────────────────────────────────────────────

// Minimal old-format PROJECT.md: heading, blurb, and a Goals section.
const SAMPLE_PROJECT = `# Integration Test Project

A project used for command pipeline integration testing.

## Goals

- Test the full migration pipeline
`;

// Flat roadmap: phase 10 complete, phase 20 pending.
const SAMPLE_ROADMAP = `# Project Roadmap

## Phases

- [x] 10 — Foundation
- [ ] 20 — Features
`;

// One active + one validated requirement — the pipeline test asserts
// preview totals of active=1, validated=1, total=2 against this.
const SAMPLE_REQUIREMENTS = `# Requirements

## Active

### R001 — Core Pipeline
- Status: active
- Description: Pipeline must work end-to-end.

## Validated

### R002 — Output Format
- Status: validated
- Description: Output matches GSD format.
`;

// Old-format STATE.md pointing at the in-progress phase.
const SAMPLE_STATE = `# State

**Current Phase:** 20-features
**Status:** in-progress
`;

// Minimal config.json payload.
const SAMPLE_CONFIG = JSON.stringify({
  projectName: 'pipeline-test',
  version: '1.0',
});

// Completed-phase plan: YAML frontmatter plus XML-tagged body sections.
// NOTE(review): frontmatter nesting reconstructed with conventional 2-space
// YAML indentation — original indentation not recoverable from this view.
const SAMPLE_PLAN_10_01 = `---
phase: "10-foundation"
plan: "01"
type: "implementation"
wave: 1
depends_on: []
files_modified: [src/core.ts]
autonomous: true
must_haves:
  truths:
    - Core module works
  artifacts:
    - src/core.ts
  key_links: []
---

# 10-01: Build Foundation

<objective>
Set up the project foundation and core module.
</objective>

<tasks>
<task>Create core module</task>
<task>Add configuration loader</task>
</tasks>

<context>
Foundation work needed before features.
</context>

<verification>
- Core module loads
- Config is parsed
</verification>

<success_criteria>
Core is operational.
</success_criteria>
`;

// Matching summary for plan 10-01 — its presence marks phase 10 as done.
const SAMPLE_SUMMARY_10_01 = `---
phase: "10-foundation"
plan: "01"
subsystem: "core"
tags:
  - foundation
requires: []
provides:
  - core-module
affects:
  - features
tech-stack:
  - typescript
key-files:
  - src/core.ts
key-decisions:
  - Use TypeScript strict mode
patterns-established:
  - Module pattern
duration: "1h"
completed: "2026-01-10"
---

# 10-01: Foundation Summary

Core module built and operational.

## What Happened

Created core module and configuration loader.

## Files Modified

- \`src/core.ts\` — Core module
`;

// In-progress-phase plan (no summary exists for it in the fixture).
const SAMPLE_PLAN_20_01 = `---
phase: "20-features"
plan: "01"
type: "implementation"
wave: 1
depends_on: [10-01]
files_modified: []
autonomous: false
---

# 20-01: Build Feature A

<objective>
Implement the first feature.
</objective>

<tasks>
<task>Design feature API</task>
<task>Implement feature logic</task>
</tasks>

<context>
Depends on foundation work.
</context>
`;
|
||||
|
||||
function createCompleteFixture(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-cmd-test-'));
|
||||
const planning = join(base, '.planning');
|
||||
mkdirSync(planning, { recursive: true });
|
||||
|
||||
writeFileSync(join(planning, 'PROJECT.md'), SAMPLE_PROJECT);
|
||||
writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);
|
||||
writeFileSync(join(planning, 'REQUIREMENTS.md'), SAMPLE_REQUIREMENTS);
|
||||
writeFileSync(join(planning, 'STATE.md'), SAMPLE_STATE);
|
||||
writeFileSync(join(planning, 'config.json'), SAMPLE_CONFIG);
|
||||
|
||||
// Phase 10: done — has plan + summary
|
||||
const phase10 = join(planning, 'phases', '10-foundation');
|
||||
mkdirSync(phase10, { recursive: true });
|
||||
writeFileSync(join(phase10, '10-01-PLAN.md'), SAMPLE_PLAN_10_01);
|
||||
writeFileSync(join(phase10, '10-01-SUMMARY.md'), SAMPLE_SUMMARY_10_01);
|
||||
|
||||
// Phase 20: in-progress — has plan, no summary
|
||||
const phase20 = join(planning, 'phases', '20-features');
|
||||
mkdirSync(phase20, { recursive: true });
|
||||
writeFileSync(join(phase20, '20-01-PLAN.md'), SAMPLE_PLAN_20_01);
|
||||
|
||||
return base;
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
// Entry point: runs the six test groups sequentially, accumulating results
// via assert()/assertEq(), then prints totals and exits non-zero on failure.
// Each group builds its own fixture and cleans it up in a finally block.
async function main(): Promise<void> {

  // ─── Test 1: Path resolution — .planning appended when missing ─────────
  console.log('\n=== Path resolution: .planning appended when source path lacks it ===');
  {
    const base = createCompleteFixture();
    try {
      // Simulate the command's path resolution logic
      let sourcePath = resolve(base); // no .planning suffix
      if (!sourcePath.endsWith('.planning')) {
        sourcePath = join(sourcePath, '.planning');
      }
      assert(sourcePath.endsWith('.planning'), 'path-resolution: .planning appended');
      assert(existsSync(sourcePath), 'path-resolution: appended path exists');
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Test 2: Path resolution — .planning used as-is ────────────────────
  console.log('\n=== Path resolution: .planning used as-is when already present ===');
  {
    const base = createCompleteFixture();
    try {
      const planningPath = join(base, '.planning');
      let sourcePath = resolve(planningPath);
      if (!sourcePath.endsWith('.planning')) {
        sourcePath = join(sourcePath, '.planning');
      }
      assertEq(sourcePath, resolve(planningPath), 'path-resolution: .planning not double-appended');
      assert(existsSync(sourcePath), 'path-resolution: direct path exists');
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Test 3: Validation gating — non-existent path ─────────────────────
  console.log('\n=== Validation gating: non-existent path returns invalid ===');
  {
    // Date.now() suffix guarantees the path cannot exist.
    const fakePath = join(tmpdir(), 'gsd-cmd-nonexistent-' + Date.now(), '.planning');
    const result = await validatePlanningDirectory(fakePath);
    assertEq(result.valid, false, 'validation: non-existent path is invalid');
    assert(result.issues.length > 0, 'validation: has issues for non-existent path');
    const hasFatal = result.issues.some(i => i.severity === 'fatal');
    assert(hasFatal, 'validation: non-existent path has fatal issue');
  }

  // ─── Test 4: Validation gating — valid fixture passes ──────────────────
  console.log('\n=== Validation gating: valid fixture passes validation ===');
  {
    const base = createCompleteFixture();
    try {
      const result = await validatePlanningDirectory(join(base, '.planning'));
      assert(result.valid === true, 'validation: valid fixture passes');
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Test 5: Full pipeline round-trip ──────────────────────────────────
  console.log('\n=== Full pipeline: parse → transform → preview → write → deriveState ===');
  {
    const base = createCompleteFixture();
    const writeTarget = mkdtempSync(join(tmpdir(), 'gsd-cmd-write-'));
    try {
      const planningPath = join(base, '.planning');

      // (a) Validate
      const validation = await validatePlanningDirectory(planningPath);
      assert(validation.valid === true, 'pipeline: validation passes');

      // (b) Parse
      const parsed = await parsePlanningDirectory(planningPath);
      assert(parsed.roadmap !== null, 'pipeline: roadmap parsed');
      assert(Object.keys(parsed.phases).length >= 2, 'pipeline: phases parsed');

      // (c) Transform
      const project = transformToGSD(parsed);
      assert(project.milestones.length >= 1, 'pipeline: has milestones');
      assert(project.milestones[0].slices.length >= 1, 'pipeline: has slices');

      // Count totals for preview verification
      let totalTasks = 0;
      let doneTasks = 0;
      let totalSlices = 0;
      let doneSlices = 0;
      for (const m of project.milestones) {
        for (const s of m.slices) {
          totalSlices++;
          if (s.done) doneSlices++;
          for (const t of s.tasks) {
            totalTasks++;
            if (t.done) doneTasks++;
          }
        }
      }

      // (d) Preview — verify counts match project data
      const preview = generatePreview(project);
      assertEq(preview.milestoneCount, project.milestones.length, 'pipeline: preview milestoneCount');
      assertEq(preview.totalSlices, totalSlices, 'pipeline: preview totalSlices');
      assertEq(preview.totalTasks, totalTasks, 'pipeline: preview totalTasks');
      assertEq(preview.doneSlices, doneSlices, 'pipeline: preview doneSlices');
      assertEq(preview.doneTasks, doneTasks, 'pipeline: preview doneTasks');

      // Completion percentages
      const expectedSlicePct = totalSlices > 0 ? Math.round((doneSlices / totalSlices) * 100) : 0;
      const expectedTaskPct = totalTasks > 0 ? Math.round((doneTasks / totalTasks) * 100) : 0;
      assertEq(preview.sliceCompletionPct, expectedSlicePct, 'pipeline: preview sliceCompletionPct');
      assertEq(preview.taskCompletionPct, expectedTaskPct, 'pipeline: preview taskCompletionPct');

      // Requirements in preview — fixture defines exactly 1 active + 1 validated.
      assertEq(preview.requirements.active, 1, 'pipeline: preview requirements active');
      assertEq(preview.requirements.validated, 1, 'pipeline: preview requirements validated');
      assertEq(preview.requirements.total, 2, 'pipeline: preview requirements total');

      // (e) Write
      const result = await writeGSDDirectory(project, writeTarget);
      assert(result.paths.length > 0, 'pipeline: files written');

      // Key files exist
      const gsd = join(writeTarget, '.gsd');
      assert(existsSync(join(gsd, 'PROJECT.md')), 'pipeline: PROJECT.md written');
      assert(existsSync(join(gsd, 'STATE.md')), 'pipeline: STATE.md written');
      assert(existsSync(join(gsd, 'REQUIREMENTS.md')), 'pipeline: REQUIREMENTS.md written');

      const m001 = join(gsd, 'milestones', 'M001');
      assert(existsSync(join(m001, 'M001-ROADMAP.md')), 'pipeline: M001-ROADMAP.md written');
      assert(existsSync(join(m001, 'M001-CONTEXT.md')), 'pipeline: M001-CONTEXT.md written');

      // At least one slice plan exists
      const s01Plan = join(m001, 'slices', 'S01', 'S01-PLAN.md');
      assert(existsSync(s01Plan), 'pipeline: S01-PLAN.md written');

      // (f) deriveState — coherent state from written output
      console.log('  --- deriveState ---');
      const state = await deriveState(writeTarget);
      assert(state.phase !== undefined, 'pipeline: deriveState returns phase');
      assert(state.activeMilestone !== null, 'pipeline: deriveState has activeMilestone');
      assertEq(state.activeMilestone!.id, 'M001', 'pipeline: deriveState activeMilestone is M001');
      assert(state.progress.slices !== undefined, 'pipeline: deriveState has slices progress');
      assert(state.progress.tasks !== undefined, 'pipeline: deriveState has tasks progress');

    } finally {
      rmSync(base, { recursive: true, force: true });
      rmSync(writeTarget, { recursive: true, force: true });
    }
  }

  // ─── Test 6: .gsd/ exists detection ────────────────────────────────────
  console.log('\n=== .gsd/ exists detection ===');
  {
    const base = mkdtempSync(join(tmpdir(), 'gsd-cmd-exists-'));
    try {
      // No .gsd/ yet
      assert(!existsSync(join(base, '.gsd')), 'exists-detection: .gsd absent initially');

      // Create .gsd/
      mkdirSync(join(base, '.gsd'), { recursive: true });
      assert(existsSync(join(base, '.gsd')), 'exists-detection: .gsd detected after creation');
    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Results ─────────────────────────────────────────────────────────────
  console.log(`\n${passed + failed} assertions: ${passed} passed, ${failed} failed`);
  if (failed > 0) process.exit(1);
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error('Unhandled error:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
786
src/resources/extensions/gsd/tests/migrate-parser.test.ts
Normal file
786
src/resources/extensions/gsd/tests/migrate-parser.test.ts
Normal file
|
|
@ -0,0 +1,786 @@
|
|||
// Migration parser test suite
|
||||
// Tests for parsing old .planning directories into typed PlanningProject structures.
|
||||
// Uses synthetic fixture directories — no real .planning dirs needed.
|
||||
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { parsePlanningDirectory } from '../migrate/parser.ts';
|
||||
import { validatePlanningDirectory } from '../migrate/validator.ts';
|
||||
|
||||
import type { PlanningProject, ValidationResult } from '../migrate/types.ts';
|
||||
|
||||
// Suite-wide tallies updated by assert()/assertEq(); reported (and used for
// the exit code) at the end of main().
let passed = 0;
let failed = 0;
|
||||
|
||||
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq<T>(actual: T, expected: T, message: string): void {
|
||||
if (JSON.stringify(actual) === JSON.stringify(expected)) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message} — expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Fixture Helpers ───────────────────────────────────────────────────────
|
||||
|
||||
function createFixtureBase(): string {
|
||||
return mkdtempSync(join(tmpdir(), 'gsd-migrate-test-'));
|
||||
}
|
||||
|
||||
function createPlanningDir(base: string): string {
|
||||
const dir = join(base, '.planning');
|
||||
mkdirSync(dir, { recursive: true });
|
||||
return dir;
|
||||
}
|
||||
|
||||
function writeFile(dir: string, ...pathParts: string[]): (content: string) => void {
|
||||
return (content: string) => {
|
||||
const filePath = join(dir, ...pathParts);
|
||||
mkdirSync(join(filePath, '..'), { recursive: true });
|
||||
writeFileSync(filePath, content);
|
||||
};
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── Sample Fixtures ───────────────────────────────────────────────────────

// Flat roadmap: phase 29 complete, 30 and 31 pending.
const SAMPLE_ROADMAP = `# Project Roadmap

## Phases

- [x] 29 — Auth System
- [ ] 30 — Dashboard
- [ ] 31 — Notifications
`;

// Minimal old-format PROJECT.md with a Goals section.
const SAMPLE_PROJECT = `# My Project

A sample project for testing the migration parser.

## Goals

- Build a thing
- Ship it
`;

// Requirements covering three status sections: Active, Validated, Deferred.
const SAMPLE_REQUIREMENTS = `# Requirements

## Active

### R001 — User Authentication
- Status: active
- Description: Users must be able to log in.

### R002 — Dashboard View
- Status: active
- Description: Main dashboard page.

## Validated

### R003 — Session Management
- Status: validated
- Description: Sessions expire after 24h.

## Deferred

### R004 — OAuth Support
- Status: deferred
- Description: Third-party login.
`;

// Old-format STATE.md pointing at the in-progress phase.
const SAMPLE_STATE = `# State

**Current Phase:** 30-dashboard
**Status:** in-progress
`;

// Minimal config.json payload.
const SAMPLE_CONFIG = JSON.stringify({
  projectName: 'test-project',
  version: '1.0',
});

// Full plan file: YAML frontmatter (with nested must_haves) + XML-tagged body.
// NOTE(review): frontmatter nesting reconstructed with conventional 2-space
// YAML indentation — original indentation not recoverable from this view.
const SAMPLE_PLAN_XML = `---
phase: "29-auth-system"
plan: "01"
type: "implementation"
wave: 1
depends_on: []
files_modified: [src/auth.ts, src/login.ts]
autonomous: true
must_haves:
  truths:
    - Users can log in
  artifacts:
    - src/auth.ts
  key_links: []
---

# 29-01: Implement Auth

<objective>
Build the authentication system with JWT tokens and session management.
</objective>

<tasks>
<task>Create auth middleware</task>
<task>Add login endpoint</task>
<task>Add logout endpoint</task>
</tasks>

<context>
The project needs authentication before any other features can be built.
Auth tokens use JWT with RS256 signing.
</context>

<verification>
- Login returns valid JWT
- Middleware rejects invalid tokens
- Logout invalidates session
</verification>

<success_criteria>
All auth endpoints respond correctly and tokens are validated.
</success_criteria>
`;

// Summary with the full frontmatter vocabulary the parser understands.
const SAMPLE_SUMMARY = `---
phase: "29-auth-system"
plan: "01"
subsystem: "auth"
tags:
  - authentication
  - security
requires: []
provides:
  - auth-middleware
  - jwt-validation
affects:
  - api-routes
tech-stack:
  - jsonwebtoken
  - express
key-files:
  - src/auth.ts
  - src/middleware/auth.ts
key-decisions:
  - Use RS256 for JWT signing
  - Store refresh tokens in DB
patterns-established:
  - Middleware-based auth
duration: "2h"
completed: "2026-01-15"
---

# 29-01: Auth Implementation Summary

Authentication system implemented with JWT tokens.

## What Happened

Built the auth middleware and login/logout endpoints.

## Files Modified

- \`src/auth.ts\` — Core auth logic
- \`src/middleware/auth.ts\` — Express middleware
`;

// Plain-markdown phase research notes.
const SAMPLE_RESEARCH = `# Auth Research

## JWT vs Session Tokens

JWT tokens are stateless and work well for microservices.
Session tokens require server-side storage but are easier to revoke.

## Decision

Use JWT with short expiry + refresh tokens.
`;

// Milestone-scoped roadmap file (both phases complete).
const SAMPLE_MILESTONE_ROADMAP = `# Milestone v2.2 Roadmap

## Phases

- [x] 29 — Auth System
- [x] 30 — Dashboard
`;

// Milestone-sectioned roadmap with a <details> block — exercises the
// sectioned/collapsed roadmap format the parser must handle.
const SAMPLE_MILESTONE_SECTIONED_ROADMAP = `# Project Roadmap

## v2.0 — Foundation

<details>
<summary>Completed</summary>

- [x] 01 — Project Setup
- [x] 02 — Database Schema

</details>

## v2.5 — Features

- [x] 29 — Auth System
- [ ] 30 — Dashboard
- [ ] 31 — Notifications
`;

// Quick-task plan: no frontmatter, plain markdown sections.
const SAMPLE_QUICK_PLAN = `# 001: Fix Login Bug

## Description

Fix the login button not responding on mobile.

## Steps

1. Debug click handler
2. Fix event propagation
3. Test on mobile
`;

// Quick-task summary: presence marks the quick task as done.
const SAMPLE_QUICK_SUMMARY = `# 001: Fix Login Bug — Summary

Fixed the login button by correcting the touch event handler.
`;
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Test Groups
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
// Entry point: runs all 13 parser test groups sequentially, then prints the
// pass/fail tally and exits non-zero on any failure. Each group builds its
// own throwaway fixture directory and tears it down in `finally`, so groups
// are independent of one another.
// NOTE(review): createFixtureBase / createPlanningDir / cleanup are fixture
// helpers defined earlier in this file (not shown here) — presumably tmp-dir
// based; confirm against the top of the file.
async function main(): Promise<void> {

  // ─── Test 1: Complete .planning directory ──────────────────────────────
  console.log('\n=== Complete .planning directory with all file types ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);

      // Root files
      writeFileSync(join(planning, 'PROJECT.md'), SAMPLE_PROJECT);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);
      writeFileSync(join(planning, 'REQUIREMENTS.md'), SAMPLE_REQUIREMENTS);
      writeFileSync(join(planning, 'STATE.md'), SAMPLE_STATE);
      writeFileSync(join(planning, 'config.json'), SAMPLE_CONFIG);

      // Phase directory with plan, summary, research
      const phaseDir = join(planning, 'phases', '29-auth-system');
      mkdirSync(phaseDir, { recursive: true });
      writeFileSync(join(phaseDir, '29-01-PLAN.md'), SAMPLE_PLAN_XML);
      writeFileSync(join(phaseDir, '29-01-SUMMARY.md'), SAMPLE_SUMMARY);
      writeFileSync(join(phaseDir, '29-RESEARCH.md'), SAMPLE_RESEARCH);

      // Second phase directory (inline fixture: frontmatter + XML sections)
      const phase2Dir = join(planning, 'phases', '30-dashboard');
      mkdirSync(phase2Dir, { recursive: true });
      writeFileSync(join(phase2Dir, '30-01-PLAN.md'), `---
phase: "30-dashboard"
plan: "01"
type: "implementation"
wave: 1
depends_on: [29-01]
files_modified: []
autonomous: false
---

# 30-01: Build Dashboard

<objective>
Create the main dashboard view.
</objective>

<tasks>
<task>Create dashboard component</task>
<task>Add data fetching</task>
</tasks>

<context>
Dashboard needs auth to be complete first.
</context>
`);

      // Quick tasks
      const quickDir = join(planning, 'quick', '001-fix-login');
      mkdirSync(quickDir, { recursive: true });
      writeFileSync(join(quickDir, '001-PLAN.md'), SAMPLE_QUICK_PLAN);
      writeFileSync(join(quickDir, '001-SUMMARY.md'), SAMPLE_QUICK_SUMMARY);

      // Milestones
      const msDir = join(planning, 'milestones');
      mkdirSync(msDir, { recursive: true });
      writeFileSync(join(msDir, 'v2.2-ROADMAP.md'), SAMPLE_MILESTONE_ROADMAP);
      writeFileSync(join(msDir, 'v2.2-REQUIREMENTS.md'), 'Milestone requirements here.');

      // Research at root
      const researchDir = join(planning, 'research');
      mkdirSync(researchDir, { recursive: true });
      writeFileSync(join(researchDir, 'architecture.md'), '# Architecture Research\n\nNotes.');

      const project = await parsePlanningDirectory(planning);

      // Top-level structure
      assertEq(project.path, planning, 'project.path matches');
      assert(project.project !== null, 'PROJECT.md parsed');
      assert(project.roadmap !== null, 'ROADMAP.md parsed');
      assert(project.requirements.length > 0, 'requirements parsed');
      assert(project.state !== null, 'STATE.md parsed');
      assert(project.config !== null, 'config.json parsed');

      // Phases
      assert('29-auth-system' in project.phases, 'phase 29 present');
      assert('30-dashboard' in project.phases, 'phase 30 present');

      const phase29 = project.phases['29-auth-system'];
      assertEq(phase29?.number, 29, 'phase 29 number');
      assertEq(phase29?.slug, 'auth-system', 'phase 29 slug');
      assert('01' in (phase29?.plans ?? {}), 'phase 29 has plan 01');
      assert('01' in (phase29?.summaries ?? {}), 'phase 29 has summary 01');
      assert((phase29?.research?.length ?? 0) > 0, 'phase 29 has research');

      // Plan content (XML-in-markdown)
      const plan29 = phase29?.plans?.['01'];
      assert(plan29 !== undefined, 'plan 29-01 exists');
      assert(plan29?.objective?.includes('authentication') ?? false, 'plan objective extracted');
      assert((plan29?.tasks?.length ?? 0) >= 3, 'plan tasks extracted');
      assert(plan29?.context?.includes('JWT') ?? false, 'plan context extracted');
      assert(plan29?.verification !== '', 'plan verification extracted');
      assert(plan29?.successCriteria !== '', 'plan success criteria extracted');

      // Plan frontmatter
      assertEq(plan29?.frontmatter?.phase, '29-auth-system', 'plan frontmatter phase');
      assertEq(plan29?.frontmatter?.plan, '01', 'plan frontmatter plan');
      assertEq(plan29?.frontmatter?.type, 'implementation', 'plan frontmatter type');
      assertEq(plan29?.frontmatter?.wave, 1, 'plan frontmatter wave');
      assertEq(plan29?.frontmatter?.autonomous, true, 'plan frontmatter autonomous');

      // Summary content
      const summary29 = phase29?.summaries?.['01'];
      assert(summary29 !== undefined, 'summary 29-01 exists');
      assertEq(summary29?.frontmatter?.phase, '29-auth-system', 'summary frontmatter phase');
      assertEq(summary29?.frontmatter?.plan, '01', 'summary frontmatter plan');
      assertEq(summary29?.frontmatter?.subsystem, 'auth', 'summary frontmatter subsystem');
      assert((summary29?.frontmatter?.tags?.length ?? 0) >= 2, 'summary frontmatter tags');
      assert((summary29?.frontmatter?.provides?.length ?? 0) >= 2, 'summary frontmatter provides');
      assert((summary29?.frontmatter?.affects?.length ?? 0) >= 1, 'summary frontmatter affects');
      assert((summary29?.frontmatter?.['tech-stack']?.length ?? 0) >= 2, 'summary frontmatter tech-stack');
      assert((summary29?.frontmatter?.['key-files']?.length ?? 0) >= 2, 'summary frontmatter key-files');
      assert((summary29?.frontmatter?.['key-decisions']?.length ?? 0) >= 2, 'summary frontmatter key-decisions');
      assert((summary29?.frontmatter?.['patterns-established']?.length ?? 0) >= 1, 'summary frontmatter patterns-established');
      assertEq(summary29?.frontmatter?.duration, '2h', 'summary frontmatter duration');
      assertEq(summary29?.frontmatter?.completed, '2026-01-15', 'summary frontmatter completed');

      // Quick tasks
      assert(project.quickTasks.length >= 1, 'quick tasks parsed');
      assertEq(project.quickTasks[0]?.number, 1, 'quick task number');
      assert(project.quickTasks[0]?.plan !== null, 'quick task has plan');
      assert(project.quickTasks[0]?.summary !== null, 'quick task has summary');

      // Milestones
      assert(project.milestones.length >= 1, 'milestones parsed');

      // Root research
      assert(project.research.length >= 1, 'root research parsed');

      // Config
      assertEq(project.config?.projectName, 'test-project', 'config projectName');

      // State
      assert(project.state?.currentPhase?.includes('30') ?? false, 'state current phase');
      assertEq(project.state?.status, 'in-progress', 'state status');

      // Validation
      assertEq(project.validation.valid, true, 'validation passes for complete dir');
      assertEq(project.validation.issues.length, 0, 'no validation issues');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 2: Minimal .planning directory (only ROADMAP.md) ─────────────
  console.log('\n=== Minimal .planning directory (only ROADMAP.md) ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      const project = await parsePlanningDirectory(planning);

      // Every optional input should degrade to null/empty, never throw.
      assertEq(project.project, null, 'minimal: PROJECT.md is null');
      assert(project.roadmap !== null, 'minimal: ROADMAP.md parsed');
      assertEq(project.requirements.length, 0, 'minimal: no requirements');
      assertEq(project.state, null, 'minimal: no state');
      assertEq(project.config, null, 'minimal: no config');
      assertEq(Object.keys(project.phases).length, 0, 'minimal: no phases');
      assertEq(project.quickTasks.length, 0, 'minimal: no quick tasks');
      assertEq(project.milestones.length, 0, 'minimal: no milestones');
      assertEq(project.research.length, 0, 'minimal: no research');
      assertEq(project.validation.valid, true, 'minimal: validation passes');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 3: Missing directory → validation fatal error ────────────────
  console.log('\n=== Missing directory → validation returns fatal error ===');
  {
    const base = createFixtureBase();
    try {
      const result = await validatePlanningDirectory(join(base, 'nonexistent'));

      assertEq(result.valid, false, 'missing dir: validation fails');
      assert(result.issues.length > 0, 'missing dir: has issues');
      assert(
        result.issues.some(i => i.severity === 'fatal'),
        'missing dir: has fatal issue'
      );
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 4: Duplicate phase numbers ───────────────────────────────────
  console.log('\n=== Phase directory with duplicate numbers ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      // Two phase dirs sharing number 45: both must survive parsing
      // (disambiguation happens later, in the transformer).
      const phasesDir = join(planning, 'phases');
      mkdirSync(join(phasesDir, '45-core-infrastructure'), { recursive: true });
      mkdirSync(join(phasesDir, '45-logging-config'), { recursive: true });

      writeFileSync(
        join(phasesDir, '45-core-infrastructure', '45-01-PLAN.md'),
        '# Core Plan\n\n<objective>Core infra</objective>'
      );
      writeFileSync(
        join(phasesDir, '45-logging-config', '45-01-PLAN.md'),
        '# Logging Plan\n\n<objective>Logging config</objective>'
      );

      const project = await parsePlanningDirectory(planning);

      assert('45-core-infrastructure' in project.phases, 'dup nums: core-infrastructure phase present');
      assert('45-logging-config' in project.phases, 'dup nums: logging-config phase present');
      assertEq(project.phases['45-core-infrastructure']?.number, 45, 'dup nums: both have number 45 (a)');
      assertEq(project.phases['45-logging-config']?.number, 45, 'dup nums: both have number 45 (b)');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 5: XML-in-markdown plan parsing ──────────────────────────────
  console.log('\n=== Plan file with XML-in-markdown ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      const phaseDir = join(planning, 'phases', '29-auth-system');
      mkdirSync(phaseDir, { recursive: true });
      writeFileSync(join(phaseDir, '29-01-PLAN.md'), SAMPLE_PLAN_XML);

      const project = await parsePlanningDirectory(planning);
      const plan = project.phases['29-auth-system']?.plans?.['01'];

      assert(plan !== undefined, 'xml plan: plan exists');
      assert(plan?.objective?.includes('authentication') ?? false, 'xml plan: objective extracted');
      assert((plan?.tasks?.length ?? 0) === 3, 'xml plan: 3 tasks extracted');
      assert(plan?.tasks?.[0]?.includes('auth middleware') ?? false, 'xml plan: first task content');
      assert(plan?.context?.includes('JWT') ?? false, 'xml plan: context extracted');
      assert(plan?.verification?.includes('Login returns') ?? false, 'xml plan: verification extracted');
      assert(plan?.successCriteria?.includes('endpoints respond') ?? false, 'xml plan: success criteria extracted');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 6: Summary file with YAML frontmatter ───────────────────────
  console.log('\n=== Summary file with YAML frontmatter ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      const phaseDir = join(planning, 'phases', '29-auth-system');
      mkdirSync(phaseDir, { recursive: true });
      writeFileSync(join(phaseDir, '29-01-SUMMARY.md'), SAMPLE_SUMMARY);

      const project = await parsePlanningDirectory(planning);
      const summary = project.phases['29-auth-system']?.summaries?.['01'];

      // Exact-value checks on every frontmatter field (arrays compared
      // structurally via assertEq's JSON comparison).
      assert(summary !== undefined, 'summary fm: summary exists');
      assertEq(summary?.frontmatter?.phase, '29-auth-system', 'summary fm: phase');
      assertEq(summary?.frontmatter?.plan, '01', 'summary fm: plan');
      assertEq(summary?.frontmatter?.subsystem, 'auth', 'summary fm: subsystem');
      assertEq(summary?.frontmatter?.tags, ['authentication', 'security'], 'summary fm: tags');
      assertEq(summary?.frontmatter?.provides, ['auth-middleware', 'jwt-validation'], 'summary fm: provides');
      assertEq(summary?.frontmatter?.affects, ['api-routes'], 'summary fm: affects');
      assertEq(summary?.frontmatter?.['tech-stack'], ['jsonwebtoken', 'express'], 'summary fm: tech-stack');
      assertEq(summary?.frontmatter?.['key-files'], ['src/auth.ts', 'src/middleware/auth.ts'], 'summary fm: key-files');
      assertEq(summary?.frontmatter?.['key-decisions'], ['Use RS256 for JWT signing', 'Store refresh tokens in DB'], 'summary fm: key-decisions');
      assertEq(summary?.frontmatter?.['patterns-established'], ['Middleware-based auth'], 'summary fm: patterns-established');
      assertEq(summary?.frontmatter?.duration, '2h', 'summary fm: duration');
      assertEq(summary?.frontmatter?.completed, '2026-01-15', 'summary fm: completed');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 7: Orphan summaries (no matching plan) ──────────────────────
  console.log('\n=== Orphan summaries (no matching plan) ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      const phaseDir = join(planning, 'phases', '45-logging-config');
      mkdirSync(phaseDir, { recursive: true });

      // Summaries without corresponding plans
      writeFileSync(join(phaseDir, '45-04-SUMMARY.md'), `---
phase: "45-logging-config"
plan: "04"
subsystem: "logging"
---

# 45-04 Summary

Orphan summary content.
`);
      writeFileSync(join(phaseDir, '45-05-SUMMARY.md'), `---
phase: "45-logging-config"
plan: "05"
subsystem: "logging"
---

# 45-05 Summary

Another orphan.
`);

      const project = await parsePlanningDirectory(planning);
      const phase = project.phases['45-logging-config'];

      assert(phase !== undefined, 'orphan: phase exists');
      assertEq(Object.keys(phase?.plans ?? {}).length, 0, 'orphan: no plans');
      assert(Object.keys(phase?.summaries ?? {}).length >= 2, 'orphan: summaries preserved');
      assert('04' in (phase?.summaries ?? {}), 'orphan: summary 04 present');
      assert('05' in (phase?.summaries ?? {}), 'orphan: summary 05 present');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 8: .archive/ directory skipped ──────────────────────────────
  console.log('\n=== .archive/ directory → skipped by default ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      // Normal phase
      const phaseDir = join(planning, 'phases', '29-auth-system');
      mkdirSync(phaseDir, { recursive: true });
      writeFileSync(join(phaseDir, '29-01-PLAN.md'), SAMPLE_PLAN_XML);

      // Archived phase (should be skipped)
      const archiveDir = join(planning, '.archive', 'v2.5-deploy', '29-old-auth');
      mkdirSync(archiveDir, { recursive: true });
      writeFileSync(join(archiveDir, '29-01-PLAN.md'), '# Archived plan');

      const project = await parsePlanningDirectory(planning);

      assert('29-auth-system' in project.phases, 'archive: normal phase present');
      // Archive phases should not appear in the phases map
      assert(!Object.keys(project.phases).some(k => k.includes('old-auth')), 'archive: archived phase not present');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 9: Quick tasks ──────────────────────────────────────────────
  console.log('\n=== Quick tasks parsed ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      // Quick task 1
      const qt1 = join(planning, 'quick', '001-fix-login');
      mkdirSync(qt1, { recursive: true });
      writeFileSync(join(qt1, '001-PLAN.md'), SAMPLE_QUICK_PLAN);
      writeFileSync(join(qt1, '001-SUMMARY.md'), SAMPLE_QUICK_SUMMARY);

      // Quick task 2 (plan only, no summary)
      const qt2 = join(planning, 'quick', '002-update-deps');
      mkdirSync(qt2, { recursive: true });
      writeFileSync(join(qt2, '002-PLAN.md'), '# 002: Update Dependencies\n\nUpdate all deps.');

      const project = await parsePlanningDirectory(planning);

      assertEq(project.quickTasks.length, 2, 'quick: 2 quick tasks');
      assertEq(project.quickTasks[0]?.number, 1, 'quick: first task number');
      assertEq(project.quickTasks[0]?.slug, 'fix-login', 'quick: first task slug');
      assert(project.quickTasks[0]?.plan !== null, 'quick: first task has plan');
      assert(project.quickTasks[0]?.summary !== null, 'quick: first task has summary');
      assertEq(project.quickTasks[1]?.number, 2, 'quick: second task number');
      assert(project.quickTasks[1]?.plan !== null, 'quick: second task has plan');
      assertEq(project.quickTasks[1]?.summary, null, 'quick: second task has no summary');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 10: Roadmap with milestone sections and <details> ────────────
  console.log('\n=== Roadmap with milestone sections and <details> blocks ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_MILESTONE_SECTIONED_ROADMAP);

      const project = await parsePlanningDirectory(planning);

      assert(project.roadmap !== null, 'ms roadmap: roadmap parsed');
      assert((project.roadmap?.milestones?.length ?? 0) >= 2, 'ms roadmap: has milestone sections');

      // Check collapsed milestone
      const v20 = project.roadmap?.milestones?.find(m => m.id.includes('2.0'));
      assert(v20 !== undefined, 'ms roadmap: v2.0 milestone found');
      assertEq(v20?.collapsed, true, 'ms roadmap: v2.0 is collapsed');
      assert((v20?.phases?.length ?? 0) >= 2, 'ms roadmap: v2.0 has phases');
      assert(v20?.phases?.every(p => p.done) ?? false, 'ms roadmap: v2.0 phases all done');

      // Check active milestone
      const v25 = project.roadmap?.milestones?.find(m => m.id.includes('2.5'));
      assert(v25 !== undefined, 'ms roadmap: v2.5 milestone found');
      assertEq(v25?.collapsed, false, 'ms roadmap: v2.5 is not collapsed');
      assert((v25?.phases?.length ?? 0) >= 3, 'ms roadmap: v2.5 has phases');

      // Check completion state
      const phase29 = v25?.phases?.find(p => p.number === 29);
      assert(phase29?.done === true, 'ms roadmap: phase 29 is done');
      const phase30 = v25?.phases?.find(p => p.number === 30);
      assert(phase30?.done === false, 'ms roadmap: phase 30 is not done');
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 11: Non-standard phase files → extra files ──────────────────
  console.log('\n=== Non-standard phase files → collected as extra files ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);

      const phaseDir = join(planning, 'phases', '36-attachment-system');
      mkdirSync(phaseDir, { recursive: true });
      writeFileSync(join(phaseDir, '36-01-PLAN.md'), '<objective>Attachments</objective>');
      writeFileSync(join(phaseDir, 'BASELINE.md'), '# Baseline\n\nBaseline measurements.');
      writeFileSync(join(phaseDir, 'BUNDLE-ANALYSIS.md'), '# Bundle Analysis\n\nResults.');
      writeFileSync(join(phaseDir, 'depcheck-results.txt'), 'unused: pkg-a, pkg-b');

      const project = await parsePlanningDirectory(planning);
      const phase = project.phases['36-attachment-system'];

      assert(phase !== undefined, 'extra: phase exists');
      assert((phase?.extraFiles?.length ?? 0) >= 3, 'extra: non-standard files collected');
      assert(
        phase?.extraFiles?.some(f => f.fileName === 'BASELINE.md') ?? false,
        'extra: BASELINE.md collected'
      );
      assert(
        phase?.extraFiles?.some(f => f.fileName === 'BUNDLE-ANALYSIS.md') ?? false,
        'extra: BUNDLE-ANALYSIS.md collected'
      );
      assert(
        phase?.extraFiles?.some(f => f.fileName === 'depcheck-results.txt') ?? false,
        'extra: depcheck-results.txt collected'
      );
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 12: Validation — missing ROADMAP.md → fatal ─────────────────
  console.log('\n=== Validation: missing ROADMAP.md → fatal ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      // Only PROJECT.md, no ROADMAP.md
      writeFileSync(join(planning, 'PROJECT.md'), SAMPLE_PROJECT);

      const result = await validatePlanningDirectory(planning);

      assertEq(result.valid, false, 'no roadmap: validation fails');
      assert(
        result.issues.some(i => i.severity === 'fatal' && i.file.includes('ROADMAP')),
        'no roadmap: fatal issue mentions ROADMAP'
      );
    } finally {
      cleanup(base);
    }
  }

  // ─── Test 13: Validation — missing PROJECT.md → warning ───────────────
  console.log('\n=== Validation: missing PROJECT.md → warning ===');
  {
    const base = createFixtureBase();
    try {
      const planning = createPlanningDir(base);
      writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);
      // No PROJECT.md

      const result = await validatePlanningDirectory(planning);

      // A warning-severity issue must not flip `valid` to false.
      assertEq(result.valid, true, 'no project: validation passes (warning only)');
      assert(
        result.issues.some(i => i.severity === 'warning' && i.file.includes('PROJECT')),
        'no project: warning issue mentions PROJECT'
      );
    } finally {
      cleanup(base);
    }
  }

  // ═════════════════════════════════════════════════════════════════════════
  // Results
  // ═════════════════════════════════════════════════════════════════════════

  console.log(`\n${'='.repeat(40)}`);
  console.log(`Results: ${passed} passed, ${failed} failed`);
  if (failed > 0) {
    process.exit(1);
  } else {
    console.log('All tests passed ✓');
  }
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
657
src/resources/extensions/gsd/tests/migrate-transformer.test.ts
Normal file
657
src/resources/extensions/gsd/tests/migrate-transformer.test.ts
Normal file
|
|
@ -0,0 +1,657 @@
|
|||
// Migration transformer test suite
|
||||
// Tests for transforming parsed PlanningProject into GSDProject structures.
|
||||
// Uses synthetic in-memory fixtures — no filesystem needed.
|
||||
// Transformer is pure: PlanningProject → GSDProject.
|
||||
|
||||
import { transformToGSD } from '../migrate/transformer.ts';
|
||||
import type {
|
||||
PlanningProject,
|
||||
PlanningPhase,
|
||||
PlanningPlan,
|
||||
PlanningSummary,
|
||||
PlanningRoadmap,
|
||||
PlanningRoadmapEntry,
|
||||
PlanningRoadmapMilestone,
|
||||
PlanningRequirement,
|
||||
PlanningResearch,
|
||||
GSDProject,
|
||||
GSDMilestone,
|
||||
GSDSlice,
|
||||
GSDTask,
|
||||
} from '../migrate/types.ts';
|
||||
|
||||
// Running tally of assertion outcomes; incremented by assert/assertEq below
// and reported by the scenario blocks' surrounding harness.
let passed = 0;
let failed = 0;
|
||||
|
||||
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq<T>(actual: T, expected: T, message: string): void {
|
||||
if (JSON.stringify(actual) === JSON.stringify(expected)) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message} — expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Fixture Helpers ───────────────────────────────────────────────────────
|
||||
|
||||
function emptyProject(overrides: Partial<PlanningProject> = {}): PlanningProject {
|
||||
return {
|
||||
path: '/fake/.planning',
|
||||
project: null,
|
||||
roadmap: null,
|
||||
requirements: [],
|
||||
state: null,
|
||||
config: null,
|
||||
phases: {},
|
||||
quickTasks: [],
|
||||
milestones: [],
|
||||
research: [],
|
||||
validation: { valid: true, issues: [] },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function flatRoadmap(entries: PlanningRoadmapEntry[]): PlanningRoadmap {
|
||||
return {
|
||||
raw: entries.map((e) => `- [${e.done ? 'x' : ' '}] Phase ${e.number}: ${e.title}`).join('\n'),
|
||||
milestones: [],
|
||||
phases: entries,
|
||||
};
|
||||
}
|
||||
|
||||
function milestoneRoadmap(milestones: PlanningRoadmapMilestone[]): PlanningRoadmap {
|
||||
return {
|
||||
raw: milestones.map((m) => `## ${m.id}: ${m.title}`).join('\n'),
|
||||
milestones,
|
||||
phases: [],
|
||||
};
|
||||
}
|
||||
|
||||
function roadmapEntry(number: number, title: string, done = false): PlanningRoadmapEntry {
|
||||
return { number, title, done, raw: `- [${done ? 'x' : ' '}] Phase ${number}: ${title}` };
|
||||
}
|
||||
|
||||
function makePhase(dirName: string, number: number, slug: string, overrides: Partial<PlanningPhase> = {}): PlanningPhase {
|
||||
return {
|
||||
dirName,
|
||||
number,
|
||||
slug,
|
||||
plans: {},
|
||||
summaries: {},
|
||||
research: [],
|
||||
verifications: [],
|
||||
extraFiles: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makePlan(planNumber: string, overrides: Partial<PlanningPlan> = {}): PlanningPlan {
|
||||
return {
|
||||
fileName: `00-${planNumber}-PLAN.md`,
|
||||
planNumber,
|
||||
frontmatter: {
|
||||
phase: '00',
|
||||
plan: planNumber,
|
||||
type: 'implementation',
|
||||
wave: null,
|
||||
depends_on: [],
|
||||
files_modified: [],
|
||||
autonomous: false,
|
||||
must_haves: null,
|
||||
},
|
||||
objective: `Objective for plan ${planNumber}`,
|
||||
tasks: [`Task 1 for plan ${planNumber}`],
|
||||
context: '',
|
||||
verification: '',
|
||||
successCriteria: '',
|
||||
raw: '',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeSummary(planNumber: string, overrides: Partial<PlanningSummary> = {}): PlanningSummary {
|
||||
return {
|
||||
fileName: `00-${planNumber}-SUMMARY.md`,
|
||||
planNumber,
|
||||
frontmatter: {
|
||||
phase: '00',
|
||||
plan: planNumber,
|
||||
subsystem: 'core',
|
||||
tags: [],
|
||||
requires: [],
|
||||
provides: [`feature-${planNumber}`],
|
||||
affects: [],
|
||||
'tech-stack': [],
|
||||
'key-files': [`file-${planNumber}.ts`],
|
||||
'key-decisions': [`decision-${planNumber}`],
|
||||
'patterns-established': [],
|
||||
duration: '2h',
|
||||
completed: '2026-01-15',
|
||||
},
|
||||
body: `Summary body for plan ${planNumber}`,
|
||||
raw: '',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeRequirement(id: string, title: string, status = 'active'): PlanningRequirement {
|
||||
return { id, title, status, description: `Description for ${id}`, raw: '' };
|
||||
}
|
||||
|
||||
function makeResearch(fileName: string, content: string): PlanningResearch {
|
||||
return { fileName, content };
|
||||
}
|
||||
|
||||
// ─── Scenario 1: Flat Single-Milestone (3 phases → M001 with S01/S02/S03) ──
|
||||
|
||||
{
|
||||
console.log('Scenario 1: Flat single-milestone');
|
||||
|
||||
const project = emptyProject({
|
||||
project: '# My Project\nA cool project.',
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'setup'),
|
||||
roadmapEntry(2, 'core-logic'),
|
||||
roadmapEntry(3, 'polish'),
|
||||
]),
|
||||
phases: {
|
||||
'1-setup': makePhase('1-setup', 1, 'setup', {
|
||||
plans: { '01': makePlan('01') },
|
||||
}),
|
||||
'2-core-logic': makePhase('2-core-logic', 2, 'core-logic', {
|
||||
plans: { '01': makePlan('01'), '02': makePlan('02') },
|
||||
}),
|
||||
'3-polish': makePhase('3-polish', 3, 'polish', {
|
||||
plans: { '01': makePlan('01') },
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.milestones.length, 1, 'flat: produces 1 milestone');
|
||||
assert(result.milestones[0]?.id === 'M001', 'flat: milestone ID is M001');
|
||||
assertEq(result.milestones[0]?.slices.length, 3, 'flat: 3 slices');
|
||||
assertEq(result.milestones[0]?.slices[0]?.id, 'S01', 'flat: first slice is S01');
|
||||
assertEq(result.milestones[0]?.slices[1]?.id, 'S02', 'flat: second slice is S02');
|
||||
assertEq(result.milestones[0]?.slices[2]?.id, 'S03', 'flat: third slice is S03');
|
||||
assert(result.milestones[0]?.slices[0]?.title.length > 0, 'flat: slice title not empty');
|
||||
assertEq(result.milestones[0]?.slices[0]?.tasks.length, 1, 'flat: S01 has 1 task');
|
||||
assertEq(result.milestones[0]?.slices[1]?.tasks.length, 2, 'flat: S02 has 2 tasks');
|
||||
assertEq(result.milestones[0]?.slices[2]?.tasks.length, 1, 'flat: S03 has 1 task');
|
||||
assertEq(result.milestones[0]?.slices[0]?.tasks[0]?.id, 'T01', 'flat: first task is T01');
|
||||
assertEq(result.milestones[0]?.slices[1]?.tasks[1]?.id, 'T02', 'flat: second task in S02 is T02');
|
||||
assert(result.projectContent.includes('My Project'), 'flat: projectContent preserved');
|
||||
assertEq(result.milestones[0]?.boundaryMap, [], 'flat: boundaryMap defaults to empty');
|
||||
}
|
||||
|
||||
// ─── Scenario 2: Multi-Milestone (2 milestones with independent numbering) ──
|
||||
|
||||
{
|
||||
console.log('Scenario 2: Multi-milestone');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: milestoneRoadmap([
|
||||
{
|
||||
id: 'v1',
|
||||
title: 'Version One',
|
||||
collapsed: false,
|
||||
phases: [roadmapEntry(1, 'alpha'), roadmapEntry(2, 'beta')],
|
||||
},
|
||||
{
|
||||
id: 'v2',
|
||||
title: 'Version Two',
|
||||
collapsed: false,
|
||||
phases: [roadmapEntry(1, 'gamma'), roadmapEntry(2, 'delta'), roadmapEntry(3, 'epsilon')],
|
||||
},
|
||||
]),
|
||||
phases: {
|
||||
'1-alpha': makePhase('1-alpha', 1, 'alpha', { plans: { '01': makePlan('01') } }),
|
||||
'2-beta': makePhase('2-beta', 2, 'beta', { plans: { '01': makePlan('01') } }),
|
||||
'1-gamma': makePhase('1-gamma', 1, 'gamma', { plans: { '01': makePlan('01') } }),
|
||||
'2-delta': makePhase('2-delta', 2, 'delta', { plans: { '01': makePlan('01') } }),
|
||||
'3-epsilon': makePhase('3-epsilon', 3, 'epsilon', { plans: { '01': makePlan('01') } }),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.milestones.length, 2, 'multi: 2 milestones');
|
||||
assertEq(result.milestones[0]?.id, 'M001', 'multi: first milestone M001');
|
||||
assertEq(result.milestones[1]?.id, 'M002', 'multi: second milestone M002');
|
||||
assertEq(result.milestones[0]?.slices.length, 2, 'multi: M001 has 2 slices');
|
||||
assertEq(result.milestones[1]?.slices.length, 3, 'multi: M002 has 3 slices');
|
||||
// Independent numbering: both start at S01
|
||||
assertEq(result.milestones[0]?.slices[0]?.id, 'S01', 'multi: M001 starts at S01');
|
||||
assertEq(result.milestones[1]?.slices[0]?.id, 'S01', 'multi: M002 starts at S01');
|
||||
assertEq(result.milestones[1]?.slices[2]?.id, 'S03', 'multi: M002 third slice is S03');
|
||||
assert(result.milestones[0]?.title.length > 0, 'multi: M001 has title');
|
||||
assert(result.milestones[1]?.title.length > 0, 'multi: M002 has title');
|
||||
}
|
||||
|
||||
// ─── Scenario 3: Decimal Phase Ordering (1, 2, 2.1, 2.2, 3 → S01–S05) ──
|
||||
|
||||
{
|
||||
console.log('Scenario 3: Decimal phase ordering');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'foundation'),
|
||||
roadmapEntry(2, 'main-feature'),
|
||||
roadmapEntry(2.1, 'sub-feature-a'),
|
||||
roadmapEntry(2.2, 'sub-feature-b'),
|
||||
roadmapEntry(3, 'finalize'),
|
||||
]),
|
||||
phases: {
|
||||
'1-foundation': makePhase('1-foundation', 1, 'foundation'),
|
||||
'2-main-feature': makePhase('2-main-feature', 2, 'main-feature'),
|
||||
'2.1-sub-feature-a': makePhase('2.1-sub-feature-a', 2.1, 'sub-feature-a'),
|
||||
'2.2-sub-feature-b': makePhase('2.2-sub-feature-b', 2.2, 'sub-feature-b'),
|
||||
'3-finalize': makePhase('3-finalize', 3, 'finalize'),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.milestones[0]?.slices.length, 5, 'decimal: 5 slices total');
|
||||
assertEq(result.milestones[0]?.slices[0]?.id, 'S01', 'decimal: first is S01');
|
||||
assertEq(result.milestones[0]?.slices[1]?.id, 'S02', 'decimal: second is S02');
|
||||
assertEq(result.milestones[0]?.slices[2]?.id, 'S03', 'decimal: third is S03');
|
||||
assertEq(result.milestones[0]?.slices[3]?.id, 'S04', 'decimal: fourth is S04');
|
||||
assertEq(result.milestones[0]?.slices[4]?.id, 'S05', 'decimal: fifth is S05');
|
||||
// Order must be by float value: 1, 2, 2.1, 2.2, 3
|
||||
assert(
|
||||
result.milestones[0]?.slices[0]?.title.toLowerCase().includes('foundation'),
|
||||
'decimal: S01 is foundation (phase 1)',
|
||||
);
|
||||
assert(
|
||||
result.milestones[0]?.slices[4]?.title.toLowerCase().includes('finalize'),
|
||||
'decimal: S05 is finalize (phase 3)',
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Scenario 4: Completion State ──────────────────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 4: Completion state mapping');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'done-phase', true),
|
||||
roadmapEntry(2, 'active-phase', false),
|
||||
]),
|
||||
phases: {
|
||||
'1-done-phase': makePhase('1-done-phase', 1, 'done-phase', {
|
||||
plans: { '01': makePlan('01'), '02': makePlan('02') },
|
||||
summaries: {
|
||||
'01': makeSummary('01'),
|
||||
// plan 02 has no summary → task not done
|
||||
},
|
||||
}),
|
||||
'2-active-phase': makePhase('2-active-phase', 2, 'active-phase', {
|
||||
plans: { '01': makePlan('01') },
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
const doneSlice = result.milestones[0]?.slices[0];
|
||||
const activeSlice = result.milestones[0]?.slices[1];
|
||||
|
||||
assert(doneSlice?.done === true, 'completion: done phase → done slice');
|
||||
assert(activeSlice?.done === false, 'completion: active phase → not-done slice');
|
||||
assert(doneSlice?.tasks[0]?.done === true, 'completion: plan with summary → done task');
|
||||
assert(doneSlice?.tasks[1]?.done === false, 'completion: plan without summary → not-done task');
|
||||
assert(doneSlice?.tasks[0]?.summary !== null, 'completion: done task has summary data');
|
||||
assert(doneSlice?.tasks[1]?.summary === null, 'completion: not-done task has null summary');
|
||||
assertEq(doneSlice?.tasks[0]?.summary?.completedAt, '2026-01-15', 'completion: summary completedAt from frontmatter');
|
||||
assertEq(doneSlice?.tasks[0]?.summary?.duration, '2h', 'completion: summary duration from frontmatter');
|
||||
assertEq(doneSlice?.tasks[0]?.summary?.provides, ['feature-01'], 'completion: summary provides from frontmatter');
|
||||
assertEq(doneSlice?.tasks[0]?.summary?.keyFiles, ['file-01.ts'], 'completion: summary keyFiles from frontmatter');
|
||||
assert(doneSlice?.tasks[0]?.summary?.whatHappened?.includes('Summary body'), 'completion: summary whatHappened from body');
|
||||
assert(doneSlice?.summary !== null, 'completion: done slice has slice summary');
|
||||
assert(activeSlice?.summary === null, 'completion: active slice has null summary');
|
||||
assertEq(doneSlice?.tasks[0]?.estimate, '2h', 'completion: task estimate from summary duration');
|
||||
}
|
||||
|
||||
// ─── Scenario 5: Research Consolidation ────────────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 5: Research consolidation');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'researched-phase')]),
|
||||
research: [
|
||||
makeResearch('SUMMARY.md', '# Project Summary\nOverview content.'),
|
||||
makeResearch('ARCHITECTURE.md', '# Architecture\nArch details.'),
|
||||
makeResearch('PITFALLS.md', '# Pitfalls\nThings to avoid.'),
|
||||
],
|
||||
phases: {
|
||||
'1-researched-phase': makePhase('1-researched-phase', 1, 'researched-phase', {
|
||||
research: [
|
||||
makeResearch('FEATURES.md', '# Phase Features\nFeature list.'),
|
||||
],
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
// Project-level research → milestone research
|
||||
assert(result.milestones[0]?.research !== null, 'research: milestone has consolidated research');
|
||||
assert(result.milestones[0]?.research!.includes('Project Summary'), 'research: includes SUMMARY content');
|
||||
assert(result.milestones[0]?.research!.includes('Architecture'), 'research: includes ARCHITECTURE content');
|
||||
assert(result.milestones[0]?.research!.includes('Pitfalls'), 'research: includes PITFALLS content');
|
||||
|
||||
// Fixed ordering: SUMMARY before ARCHITECTURE before PITFALLS
|
||||
const summaryIdx = result.milestones[0]?.research!.indexOf('Project Summary') ?? -1;
|
||||
const archIdx = result.milestones[0]?.research!.indexOf('Architecture') ?? -1;
|
||||
const pitfallIdx = result.milestones[0]?.research!.indexOf('Pitfalls') ?? -1;
|
||||
assert(summaryIdx < archIdx, 'research: SUMMARY before ARCHITECTURE in consolidated');
|
||||
assert(archIdx < pitfallIdx, 'research: ARCHITECTURE before PITFALLS in consolidated');
|
||||
|
||||
// Phase-level research → slice research
|
||||
const slice = result.milestones[0]?.slices[0];
|
||||
assert(slice?.research !== null, 'research: slice has phase research');
|
||||
assert(slice?.research!.includes('Phase Features'), 'research: slice research includes phase content');
|
||||
}
|
||||
|
||||
// ─── Scenario 6: Requirements Classification ──────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 6: Requirements classification');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'req-phase')]),
|
||||
requirements: [
|
||||
makeRequirement('R001', 'Core Feature', 'active'),
|
||||
makeRequirement('R002', 'Secondary Feature', 'validated'),
|
||||
makeRequirement('R003', 'Deferred Feature', 'deferred'),
|
||||
],
|
||||
phases: {
|
||||
'1-req-phase': makePhase('1-req-phase', 1, 'req-phase'),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.requirements.length, 3, 'requirements: 3 requirements');
|
||||
assertEq(result.requirements[0]?.id, 'R001', 'requirements: first is R001');
|
||||
assertEq(result.requirements[0]?.status, 'active', 'requirements: R001 status active');
|
||||
assertEq(result.requirements[1]?.status, 'validated', 'requirements: R002 status validated');
|
||||
assertEq(result.requirements[2]?.status, 'deferred', 'requirements: R003 status deferred');
|
||||
assert(result.requirements[0]?.title === 'Core Feature', 'requirements: R001 title preserved');
|
||||
assert(result.requirements[0]?.description.includes('Description for R001'), 'requirements: R001 description preserved');
|
||||
assertEq(result.requirements[0]?.class, 'core-capability', 'requirements: default class');
|
||||
assertEq(result.requirements[0]?.source, 'inferred', 'requirements: default source');
|
||||
assertEq(result.requirements[0]?.primarySlice, 'none yet', 'requirements: default primarySlice');
|
||||
}
|
||||
|
||||
// ─── Scenario 7: Empty Phase (no plans → slice with 0 tasks) ───────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 7: Empty phase');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'empty-phase'),
|
||||
roadmapEntry(2, 'non-empty-phase'),
|
||||
]),
|
||||
phases: {
|
||||
'1-empty-phase': makePhase('1-empty-phase', 1, 'empty-phase'),
|
||||
'2-non-empty-phase': makePhase('2-non-empty-phase', 2, 'non-empty-phase', {
|
||||
plans: { '01': makePlan('01') },
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.milestones[0]?.slices[0]?.tasks.length, 0, 'empty: empty phase → 0 tasks');
|
||||
assertEq(result.milestones[0]?.slices[1]?.tasks.length, 1, 'empty: non-empty phase → 1 task');
|
||||
assert(result.milestones[0]?.slices[0]?.id === 'S01', 'empty: empty slice still gets ID');
|
||||
}
|
||||
|
||||
// ─── Scenario 8: Demo Derivation from Plan Objective ───────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 8: Demo derivation');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'demo-phase')]),
|
||||
phases: {
|
||||
'1-demo-phase': makePhase('1-demo-phase', 1, 'demo-phase', {
|
||||
plans: {
|
||||
'01': makePlan('01', { objective: 'Build the authentication system with JWT tokens.' }),
|
||||
},
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assert(result.milestones[0]?.slices[0]?.demo.length > 0, 'demo: slice demo is not empty');
|
||||
assert(
|
||||
result.milestones[0]?.slices[0]?.demo.includes('authentication') ||
|
||||
result.milestones[0]?.slices[0]?.demo.includes('Build'),
|
||||
'demo: slice demo derived from first plan objective',
|
||||
);
|
||||
assert(result.milestones[0]?.slices[0]?.goal.length > 0, 'demo: slice goal is not empty');
|
||||
}
|
||||
|
||||
// ─── Scenario 9: Field Defaults and Type Safety ────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 9: Field defaults');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'defaults-phase')]),
|
||||
phases: {
|
||||
'1-defaults-phase': makePhase('1-defaults-phase', 1, 'defaults-phase', {
|
||||
plans: {
|
||||
'01': makePlan('01', {
|
||||
frontmatter: {
|
||||
phase: '01',
|
||||
plan: '01',
|
||||
type: 'implementation',
|
||||
wave: null,
|
||||
depends_on: [],
|
||||
files_modified: ['src/auth.ts', 'src/db.ts'],
|
||||
autonomous: false,
|
||||
must_haves: { truths: ['Auth works', 'DB connected'], artifacts: [], key_links: [] },
|
||||
},
|
||||
}),
|
||||
},
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
const slice = result.milestones[0]?.slices[0];
|
||||
const task = slice?.tasks[0];
|
||||
|
||||
assertEq(slice?.risk, 'medium', 'defaults: slice risk defaults to medium');
|
||||
assertEq(slice?.depends, [], 'defaults: S01 has no depends');
|
||||
assert(task?.description.length > 0, 'defaults: task description not empty');
|
||||
assertEq(task?.files, ['src/auth.ts', 'src/db.ts'], 'defaults: task files from frontmatter');
|
||||
assertEq(task?.mustHaves, ['Auth works', 'DB connected'], 'defaults: task mustHaves from frontmatter');
|
||||
assertEq(task?.done, false, 'defaults: task without summary is not done');
|
||||
assertEq(task?.estimate, '', 'defaults: task without summary has empty estimate');
|
||||
assert(task?.summary === null, 'defaults: task without summary has null summary');
|
||||
}
|
||||
|
||||
// ─── Scenario 10: Sequential Depends ──────────────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 10: Sequential depends');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'first'),
|
||||
roadmapEntry(2, 'second'),
|
||||
roadmapEntry(3, 'third'),
|
||||
]),
|
||||
phases: {
|
||||
'1-first': makePhase('1-first', 1, 'first'),
|
||||
'2-second': makePhase('2-second', 2, 'second'),
|
||||
'3-third': makePhase('3-third', 3, 'third'),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
const slices = result.milestones[0]?.slices;
|
||||
|
||||
assertEq(slices?.[0]?.depends, [], 'depends: S01 has empty depends');
|
||||
assertEq(slices?.[1]?.depends, ['S01'], 'depends: S02 depends on S01');
|
||||
assertEq(slices?.[2]?.depends, ['S02'], 'depends: S03 depends on S02');
|
||||
}
|
||||
|
||||
// ─── Scenario 11: Requirements with unknown status and missing IDs ─────────
|
||||
|
||||
{
|
||||
console.log('Scenario 11: Requirements edge cases');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'req-edge')]),
|
||||
requirements: [
|
||||
makeRequirement('', 'No ID Feature', 'active'),
|
||||
makeRequirement('', 'Another No ID', 'validated'),
|
||||
makeRequirement('R005', 'Has ID', 'something-weird'),
|
||||
makeRequirement('R006', 'Deferred One', 'DEFERRED'),
|
||||
],
|
||||
phases: {
|
||||
'1-req-edge': makePhase('1-req-edge', 1, 'req-edge'),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assertEq(result.requirements[0]?.id, 'R001', 'req-edge: empty id gets R001');
|
||||
assertEq(result.requirements[1]?.id, 'R002', 'req-edge: second empty id gets R002');
|
||||
assertEq(result.requirements[2]?.id, 'R005', 'req-edge: existing id preserved');
|
||||
assertEq(result.requirements[2]?.status, 'active', 'req-edge: unknown status normalized to active');
|
||||
assertEq(result.requirements[3]?.status, 'deferred', 'req-edge: uppercase DEFERRED normalized');
|
||||
}
|
||||
|
||||
// ─── Scenario 12: Vision derivation ────────────────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 12: Vision derivation');
|
||||
|
||||
// Vision from project description
|
||||
const project1 = emptyProject({
|
||||
project: '# Cool Project\nA revolutionary tool for developers.',
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'vision-phase')]),
|
||||
phases: { '1-vision-phase': makePhase('1-vision-phase', 1, 'vision-phase') },
|
||||
});
|
||||
|
||||
const result1 = transformToGSD(project1);
|
||||
assert(result1.milestones[0]?.vision.includes('revolutionary'), 'vision: derived from project first line');
|
||||
|
||||
// Vision fallback when no project
|
||||
const project2 = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'fallback')]),
|
||||
phases: { '1-fallback': makePhase('1-fallback', 1, 'fallback') },
|
||||
});
|
||||
|
||||
const result2 = transformToGSD(project2);
|
||||
assert(result2.milestones[0]?.vision.length > 0, 'vision: fallback is non-empty');
|
||||
}
|
||||
|
||||
// ─── Scenario 13: Decisions content from summaries ─────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 13: Decisions content');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'decision-phase', true)]),
|
||||
phases: {
|
||||
'1-decision-phase': makePhase('1-decision-phase', 1, 'decision-phase', {
|
||||
plans: { '01': makePlan('01') },
|
||||
summaries: { '01': makeSummary('01') },
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
assert(result.decisionsContent.includes('decision-01'), 'decisions: extracts key-decisions from summaries');
|
||||
}
|
||||
|
||||
// ─── Scenario 14: No undefined values in output ───────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 14: No undefined values');
|
||||
|
||||
const project = emptyProject({
|
||||
project: '# Test\nDescription.',
|
||||
roadmap: flatRoadmap([
|
||||
roadmapEntry(1, 'full-phase', true),
|
||||
roadmapEntry(2, 'empty-phase', false),
|
||||
]),
|
||||
requirements: [makeRequirement('R001', 'Req', 'active')],
|
||||
research: [makeResearch('SUMMARY.md', 'Research content')],
|
||||
phases: {
|
||||
'1-full-phase': makePhase('1-full-phase', 1, 'full-phase', {
|
||||
plans: { '01': makePlan('01') },
|
||||
summaries: { '01': makeSummary('01') },
|
||||
research: [makeResearch('FEATURES.md', 'Features')],
|
||||
}),
|
||||
'2-empty-phase': makePhase('2-empty-phase', 2, 'empty-phase'),
|
||||
},
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
|
||||
// Deep check for undefined values
|
||||
function checkNoUndefined(obj: unknown, path: string): void {
|
||||
if (obj === undefined) {
|
||||
assert(false, `no-undefined: ${path} is undefined`);
|
||||
return;
|
||||
}
|
||||
if (obj === null) return; // null is allowed (e.g. research, summary)
|
||||
if (Array.isArray(obj)) {
|
||||
for (let i = 0; i < obj.length; i++) {
|
||||
checkNoUndefined(obj[i], `${path}[${i}]`);
|
||||
}
|
||||
} else if (typeof obj === 'object') {
|
||||
for (const [key, val] of Object.entries(obj as Record<string, unknown>)) {
|
||||
checkNoUndefined(val, `${path}.${key}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checkNoUndefined(result, 'result');
|
||||
assert(true, 'no-undefined: deep check completed without finding undefined values');
|
||||
}
|
||||
|
||||
// ─── Scenario 15: Research with no files ───────────────────────────────────
|
||||
|
||||
{
|
||||
console.log('Scenario 15: Empty research');
|
||||
|
||||
const project = emptyProject({
|
||||
roadmap: flatRoadmap([roadmapEntry(1, 'no-research')]),
|
||||
phases: { '1-no-research': makePhase('1-no-research', 1, 'no-research') },
|
||||
});
|
||||
|
||||
const result = transformToGSD(project);
|
||||
assert(result.milestones[0]?.research === null, 'empty-research: milestone research is null');
|
||||
assert(result.milestones[0]?.slices[0]?.research === null, 'empty-research: slice research is null');
|
||||
}
|
||||
|
||||
// ─── Results ───────────────────────────────────────────────────────────────

// Print the final tally; a non-zero exit code signals failure to CI.
console.log(`\n${passed + failed} assertions: ${passed} passed, ${failed} failed`);
if (failed > 0) {
  process.exit(1);
}
|
@ -0,0 +1,443 @@
|
|||
// Unit tests for T02: validator and per-file parsers
|
||||
// Tests these independently of the T03 orchestrator (parsePlanningDirectory).
|
||||
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { validatePlanningDirectory } from '../migrate/validator.ts';
|
||||
import {
|
||||
parseOldRoadmap,
|
||||
parseOldPlan,
|
||||
parseOldSummary,
|
||||
parseOldRequirements,
|
||||
parseOldProject,
|
||||
parseOldState,
|
||||
parseOldConfig,
|
||||
} from '../migrate/parsers.ts';
|
||||
|
||||
// Running tallies mutated by the assert/assertEq helpers below.
let passed = 0;
let failed = 0;
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq<T>(actual: T, expected: T, message: string): void {
|
||||
if (JSON.stringify(actual) === JSON.stringify(expected)) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message} — expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function createFixtureBase(): string {
|
||||
return mkdtempSync(join(tmpdir(), 'gsd-migrate-t02-'));
|
||||
}
|
||||
function createPlanningDir(base: string): string {
|
||||
const dir = join(base, '.planning');
|
||||
mkdirSync(dir, { recursive: true });
|
||||
return dir;
|
||||
}
|
||||
function cleanup(base: string): void {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── Sample Fixtures ───────────────────────────────────────────────────────
|
||||
|
||||
// Flat-format roadmap fixture: checkbox phase list under "## Phases",
// numbering deliberately starting at 29 (non-1-based).
const SAMPLE_ROADMAP = `# Project Roadmap

## Phases

- [x] 29 — Auth System
- [ ] 30 — Dashboard
- [ ] 31 — Notifications
`;
// Minimal PROJECT.md fixture: title heading plus one-line description.
const SAMPLE_PROJECT = `# My Project

A sample project for testing the migration parser.
`;
// Milestone-sectioned roadmap fixture: two "## vX.Y — Title" sections, the
// first wrapped in a <details> block (parsed as collapsed), the second open.
const SAMPLE_MILESTONE_SECTIONED_ROADMAP = `# Project Roadmap

## v2.0 — Foundation

<details>
<summary>Completed</summary>

- [x] 01 — Project Setup
- [x] 02 — Database Schema

</details>

## v2.5 — Features

- [x] 29 — Auth System
- [ ] 30 — Dashboard
- [ ] 31 — Notifications
`;
// Plan fixture in the old XML-in-markdown format: YAML frontmatter followed
// by <objective>/<tasks>/<context>/<verification>/<success_criteria> tags.
// NOTE(review): YAML nesting under must_haves restored with 2-space indents
// (source formatting was mangled) — confirm against the parser's expectations.
const SAMPLE_PLAN_XML = `---
phase: "29-auth-system"
plan: "01"
type: "implementation"
wave: 1
depends_on: []
files_modified: [src/auth.ts, src/login.ts]
autonomous: true
must_haves:
  truths:
    - Users can log in
  artifacts:
    - src/auth.ts
  key_links: []
---

# 29-01: Implement Auth

<objective>
Build the authentication system with JWT tokens and session management.
</objective>

<tasks>
<task>Create auth middleware</task>
<task>Add login endpoint</task>
<task>Add logout endpoint</task>
</tasks>

<context>
The project needs authentication before any other features can be built.
Auth tokens use JWT with RS256 signing.
</context>

<verification>
- Login returns valid JWT
- Middleware rejects invalid tokens
- Logout invalidates session
</verification>

<success_criteria>
All auth endpoints respond correctly and tokens are validated.
</success_criteria>
`;
// Summary fixture: rich YAML frontmatter (kebab-case keys, list values)
// plus a short markdown body.
// NOTE(review): YAML list nesting restored with 2-space indents (source
// formatting was mangled) — confirm against the parser's expectations.
const SAMPLE_SUMMARY = `---
phase: "29-auth-system"
plan: "01"
subsystem: "auth"
tags:
  - authentication
  - security
requires: []
provides:
  - auth-middleware
  - jwt-validation
affects:
  - api-routes
tech-stack:
  - jsonwebtoken
  - express
key-files:
  - src/auth.ts
  - src/middleware/auth.ts
key-decisions:
  - Use RS256 for JWT signing
  - Store refresh tokens in DB
patterns-established:
  - Middleware-based auth
duration: "2h"
completed: "2026-01-15"
---

# 29-01: Auth Implementation Summary

Authentication system implemented with JWT tokens.
`;
// Requirements fixture: status sections (Active/Validated/Deferred), each
// entry a "### Rnnn — Title" heading with Status/Description bullets.
const SAMPLE_REQUIREMENTS = `# Requirements

## Active

### R001 — User Authentication
- Status: active
- Description: Users must be able to log in.

### R002 — Dashboard View
- Status: active
- Description: Main dashboard page.

## Validated

### R003 — Session Management
- Status: validated
- Description: Sessions expire after 24h.

## Deferred

### R004 — OAuth Support
- Status: deferred
- Description: Third-party login.
`;
// STATE.md fixture: bold-key current-phase / status lines.
const SAMPLE_STATE = `# State

**Current Phase:** 30-dashboard
**Status:** in-progress
`;
async function main(): Promise<void> {
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Validator Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== Validator: missing directory → fatal ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
const result = await validatePlanningDirectory(join(base, 'nonexistent'));
|
||||
assertEq(result.valid, false, 'missing dir: validation fails');
|
||||
assert(result.issues.length > 0, 'missing dir: has issues');
|
||||
assert(result.issues.some(i => i.severity === 'fatal'), 'missing dir: has fatal issue');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== Validator: missing ROADMAP.md → fatal ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
const planning = createPlanningDir(base);
|
||||
writeFileSync(join(planning, 'PROJECT.md'), SAMPLE_PROJECT);
|
||||
const result = await validatePlanningDirectory(planning);
|
||||
assertEq(result.valid, false, 'no roadmap: validation fails');
|
||||
assert(result.issues.some(i => i.severity === 'fatal' && i.file.includes('ROADMAP')), 'no roadmap: fatal issue mentions ROADMAP');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== Validator: missing PROJECT.md → warning ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
const planning = createPlanningDir(base);
|
||||
writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);
|
||||
const result = await validatePlanningDirectory(planning);
|
||||
assertEq(result.valid, true, 'no project: validation passes (warning only)');
|
||||
assert(result.issues.some(i => i.severity === 'warning' && i.file.includes('PROJECT')), 'no project: warning issue mentions PROJECT');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== Validator: complete directory → valid with no issues ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
const planning = createPlanningDir(base);
|
||||
writeFileSync(join(planning, 'ROADMAP.md'), SAMPLE_ROADMAP);
|
||||
writeFileSync(join(planning, 'PROJECT.md'), SAMPLE_PROJECT);
|
||||
writeFileSync(join(planning, 'REQUIREMENTS.md'), SAMPLE_REQUIREMENTS);
|
||||
writeFileSync(join(planning, 'STATE.md'), SAMPLE_STATE);
|
||||
mkdirSync(join(planning, 'phases'), { recursive: true });
|
||||
const result = await validatePlanningDirectory(planning);
|
||||
assertEq(result.valid, true, 'complete dir: validation passes');
|
||||
assertEq(result.issues.length, 0, 'complete dir: no issues');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Roadmap Parser Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== parseOldRoadmap: flat format ===');
|
||||
{
|
||||
const roadmap = parseOldRoadmap(SAMPLE_ROADMAP);
|
||||
assertEq(roadmap.milestones.length, 0, 'flat roadmap: no milestone sections');
|
||||
assertEq(roadmap.phases.length, 3, 'flat roadmap: 3 phases');
|
||||
assertEq(roadmap.phases[0].number, 29, 'flat roadmap: first phase number');
|
||||
assertEq(roadmap.phases[0].title, 'Auth System', 'flat roadmap: first phase title');
|
||||
assertEq(roadmap.phases[0].done, true, 'flat roadmap: first phase done');
|
||||
assertEq(roadmap.phases[1].done, false, 'flat roadmap: second phase not done');
|
||||
}
|
||||
|
||||
console.log('\n=== parseOldRoadmap: milestone-sectioned with <details> ===');
|
||||
{
|
||||
const roadmap = parseOldRoadmap(SAMPLE_MILESTONE_SECTIONED_ROADMAP);
|
||||
assert(roadmap.milestones.length >= 2, 'ms roadmap: has milestone sections');
|
||||
|
||||
const v20 = roadmap.milestones.find(m => m.id.includes('2.0'));
|
||||
assert(v20 !== undefined, 'ms roadmap: v2.0 found');
|
||||
assertEq(v20?.collapsed, true, 'ms roadmap: v2.0 collapsed');
|
||||
assert((v20?.phases.length ?? 0) >= 2, 'ms roadmap: v2.0 has phases');
|
||||
assert(v20?.phases.every(p => p.done) ?? false, 'ms roadmap: v2.0 all done');
|
||||
|
||||
const v25 = roadmap.milestones.find(m => m.id.includes('2.5'));
|
||||
assert(v25 !== undefined, 'ms roadmap: v2.5 found');
|
||||
assertEq(v25?.collapsed, false, 'ms roadmap: v2.5 not collapsed');
|
||||
assert((v25?.phases.length ?? 0) >= 3, 'ms roadmap: v2.5 has 3 phases');
|
||||
|
||||
const p29 = v25?.phases.find(p => p.number === 29);
|
||||
assertEq(p29?.done, true, 'ms roadmap: phase 29 done');
|
||||
const p30 = v25?.phases.find(p => p.number === 30);
|
||||
assertEq(p30?.done, false, 'ms roadmap: phase 30 not done');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Plan Parser Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== parseOldPlan: XML-in-markdown ===');
|
||||
{
|
||||
const plan = parseOldPlan(SAMPLE_PLAN_XML, '29-01-PLAN.md', '01');
|
||||
assert(plan.objective.includes('authentication'), 'plan: objective extracted');
|
||||
assertEq(plan.tasks.length, 3, 'plan: 3 tasks');
|
||||
assert(plan.tasks[0].includes('auth middleware'), 'plan: first task content');
|
||||
assert(plan.context.includes('JWT'), 'plan: context extracted');
|
||||
assert(plan.verification.includes('Login returns'), 'plan: verification extracted');
|
||||
assert(plan.successCriteria.includes('endpoints respond'), 'plan: success criteria extracted');
|
||||
|
||||
// Frontmatter
|
||||
assertEq(plan.frontmatter.phase, '29-auth-system', 'plan fm: phase');
|
||||
assertEq(plan.frontmatter.plan, '01', 'plan fm: plan');
|
||||
assertEq(plan.frontmatter.type, 'implementation', 'plan fm: type');
|
||||
assertEq(plan.frontmatter.wave, 1, 'plan fm: wave');
|
||||
assertEq(plan.frontmatter.autonomous, true, 'plan fm: autonomous');
|
||||
assert(plan.frontmatter.files_modified.length >= 2, 'plan fm: files_modified');
|
||||
assert(plan.frontmatter.must_haves !== null, 'plan fm: must_haves parsed');
|
||||
assert((plan.frontmatter.must_haves?.truths.length ?? 0) >= 1, 'plan fm: must_haves truths');
|
||||
assert((plan.frontmatter.must_haves?.artifacts.length ?? 0) >= 1, 'plan fm: must_haves artifacts');
|
||||
}
|
||||
|
||||
console.log('\n=== parseOldPlan: plain markdown (no XML tags) ===');
|
||||
{
|
||||
const plainPlan = `# 001: Fix Login Bug
|
||||
|
||||
## Description
|
||||
|
||||
Fix the login button not responding on mobile.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Debug click handler
|
||||
2. Fix event propagation
|
||||
`;
|
||||
const plan = parseOldPlan(plainPlan, '001-PLAN.md', '001');
|
||||
assertEq(plan.objective, '', 'plain plan: no objective (no XML)');
|
||||
assertEq(plan.tasks.length, 0, 'plain plan: no tasks (no XML)');
|
||||
assertEq(plan.frontmatter.phase, '', 'plain plan: no frontmatter phase');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Summary Parser Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== parseOldSummary: YAML frontmatter ===');
|
||||
{
|
||||
const summary = parseOldSummary(SAMPLE_SUMMARY, '29-01-SUMMARY.md', '01');
|
||||
assertEq(summary.frontmatter.phase, '29-auth-system', 'summary fm: phase');
|
||||
assertEq(summary.frontmatter.plan, '01', 'summary fm: plan');
|
||||
assertEq(summary.frontmatter.subsystem, 'auth', 'summary fm: subsystem');
|
||||
assertEq(summary.frontmatter.tags, ['authentication', 'security'], 'summary fm: tags');
|
||||
assertEq(summary.frontmatter.provides, ['auth-middleware', 'jwt-validation'], 'summary fm: provides');
|
||||
assertEq(summary.frontmatter.affects, ['api-routes'], 'summary fm: affects');
|
||||
assertEq(summary.frontmatter['tech-stack'], ['jsonwebtoken', 'express'], 'summary fm: tech-stack');
|
||||
assertEq(summary.frontmatter['key-files'], ['src/auth.ts', 'src/middleware/auth.ts'], 'summary fm: key-files');
|
||||
assertEq(summary.frontmatter['key-decisions'], ['Use RS256 for JWT signing', 'Store refresh tokens in DB'], 'summary fm: key-decisions');
|
||||
assertEq(summary.frontmatter['patterns-established'], ['Middleware-based auth'], 'summary fm: patterns-established');
|
||||
assertEq(summary.frontmatter.duration, '2h', 'summary fm: duration');
|
||||
assertEq(summary.frontmatter.completed, '2026-01-15', 'summary fm: completed');
|
||||
assert(summary.body.includes('Auth Implementation Summary'), 'summary: body content present');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// Requirements Parser Tests
// ═══════════════════════════════════════════════════════════════════════

console.log('\n=== parseOldRequirements ===');
{
  // SAMPLE_REQUIREMENTS contains four entries with mixed statuses; the
  // parser must keep order and normalize each status keyword.
  const reqs = parseOldRequirements(SAMPLE_REQUIREMENTS);
  assertEq(reqs.length, 4, 'requirements: 4 entries');
  assertEq(reqs[0].id, 'R001', 'req 0: id');
  assertEq(reqs[0].title, 'User Authentication', 'req 0: title');
  assertEq(reqs[0].status, 'active', 'req 0: status');
  assert(reqs[0].description.includes('log in'), 'req 0: description');
  // Spot-check later entries for id/status normalization.
  assertEq(reqs[2].id, 'R003', 'req 2: id');
  assertEq(reqs[2].status, 'validated', 'req 2: status');
  assertEq(reqs[3].id, 'R004', 'req 3: id');
  assertEq(reqs[3].status, 'deferred', 'req 3: status');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// State Parser Tests
// ═══════════════════════════════════════════════════════════════════════

console.log('\n=== parseOldState ===');
{
  const state = parseOldState(SAMPLE_STATE);
  // currentPhase may be null for malformed input, hence the ?. / ?? guard.
  assert(state.currentPhase?.includes('30') ?? false, 'state: current phase includes 30');
  assertEq(state.status, 'in-progress', 'state: status');
  // The original file content is kept verbatim alongside the parsed fields.
  assert(state.raw === SAMPLE_STATE, 'state: raw preserved');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// Config Parser Tests
// ═══════════════════════════════════════════════════════════════════════

console.log('\n=== parseOldConfig: valid JSON ===');
{
  const config = parseOldConfig('{"projectName":"test","version":"1.0"}');
  assert(config !== null, 'config: parsed');
  assertEq(config?.projectName, 'test', 'config: projectName');
}

console.log('\n=== parseOldConfig: invalid JSON → null ===');
{
  // Malformed JSON must degrade gracefully to null — never throw.
  const config = parseOldConfig('not json at all {{{');
  assertEq(config, null, 'config: invalid JSON returns null');
}

console.log('\n=== parseOldConfig: non-object JSON → null ===');
{
  // Valid JSON that is not an object (a bare string here) is also rejected.
  const config = parseOldConfig('"just a string"');
  assertEq(config, null, 'config: non-object returns null');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// Project Parser Tests
// ═══════════════════════════════════════════════════════════════════════

console.log('\n=== parseOldProject ===');
{
  // PROJECT.md is intentionally passed through untouched; downstream
  // transformation derives the vision from it later.
  const project = parseOldProject(SAMPLE_PROJECT);
  assertEq(project, SAMPLE_PROJECT, 'project: returns raw content');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
// Results
// ═══════════════════════════════════════════════════════════════════════

console.log(`\n${'='.repeat(40)}`);
console.log(`Results: ${passed} passed, ${failed} failed`);
if (failed > 0) {
  // Non-zero exit so CI treats any assertion failure as a failed run.
  process.exit(1);
} else {
  console.log('All tests passed ✓');
}
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -0,0 +1,318 @@
|
|||
// Migration writer integration test
|
||||
// Writes a complete .gsd tree to a temp dir, verifies file existence,
|
||||
// parses key files, and asserts deriveState() returns coherent state.
|
||||
// Also tests generatePreview() for correct counts.
|
||||
|
||||
import { mkdtempSync, existsSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { writeGSDDirectory } from '../migrate/writer.ts';
|
||||
import { generatePreview } from '../migrate/preview.ts';
|
||||
import { parseRoadmap, parsePlan, parseSummary } from '../files.ts';
|
||||
import { deriveState } from '../state.ts';
|
||||
import type {
|
||||
GSDProject,
|
||||
GSDMilestone,
|
||||
GSDSlice,
|
||||
GSDTask,
|
||||
GSDRequirement,
|
||||
} from '../migrate/types.ts';
|
||||
|
||||
// Running tallies maintained by the ad-hoc assert/assertEq helpers below.
let passed = 0;
let failed = 0;
|
||||
|
||||
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq<T>(actual: T, expected: T, message: string): void {
|
||||
if (JSON.stringify(actual) === JSON.stringify(expected)) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(` FAIL: ${message} — expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Fixture Builders ──────────────────────────────────────────────────────
|
||||
|
||||
function makeTask(id: string, title: string, done: boolean, hasSummary: boolean): GSDTask {
|
||||
return {
|
||||
id,
|
||||
title,
|
||||
description: `Description for ${title}`,
|
||||
done,
|
||||
estimate: done ? '1h' : '',
|
||||
files: [`src/${id.toLowerCase()}.ts`],
|
||||
mustHaves: [`${title} works correctly`],
|
||||
summary: hasSummary ? {
|
||||
completedAt: '2026-01-15',
|
||||
provides: [`${id.toLowerCase()}-feature`],
|
||||
keyFiles: [`src/${id.toLowerCase()}.ts`],
|
||||
duration: '1h',
|
||||
whatHappened: `Implemented ${title} successfully.`,
|
||||
} : null,
|
||||
};
|
||||
}
|
||||
|
||||
function makeSlice(
|
||||
id: string, title: string, done: boolean,
|
||||
tasks: GSDTask[], depends: string[],
|
||||
hasSummary: boolean,
|
||||
): GSDSlice {
|
||||
return {
|
||||
id,
|
||||
title,
|
||||
risk: 'medium' as const,
|
||||
depends,
|
||||
done,
|
||||
demo: `Demo for ${title}`,
|
||||
goal: `Goal for ${title}`,
|
||||
tasks,
|
||||
research: null,
|
||||
summary: hasSummary ? {
|
||||
completedAt: '2026-01-15',
|
||||
provides: [`${id.toLowerCase()}-capability`],
|
||||
keyFiles: tasks.map(t => `src/${t.id.toLowerCase()}.ts`),
|
||||
keyDecisions: ['Used standard patterns'],
|
||||
patternsEstablished: ['Integration pattern'],
|
||||
duration: '2h',
|
||||
whatHappened: `Completed ${title} with all tasks done.`,
|
||||
} : null,
|
||||
};
|
||||
}
|
||||
|
||||
function buildIncompleteProject(): GSDProject {
|
||||
const t01 = makeTask('T01', 'Setup Database', true, true);
|
||||
const t02 = makeTask('T02', 'Add Auth Middleware', true, true);
|
||||
const s01 = makeSlice('S01', 'Auth Foundation', true, [t01, t02], [], true);
|
||||
|
||||
const t03 = makeTask('T03', 'Build Dashboard UI', false, false);
|
||||
const s02 = makeSlice('S02', 'Dashboard', false, [t03], ['S01'], false);
|
||||
|
||||
const milestone: GSDMilestone = {
|
||||
id: 'M001',
|
||||
title: 'MVP Launch',
|
||||
vision: 'Ship the minimum viable product',
|
||||
successCriteria: ['Users can log in', 'Dashboard renders data'],
|
||||
slices: [s01, s02],
|
||||
research: '# Research\n\nMarket analysis for MVP features.\n',
|
||||
boundaryMap: [],
|
||||
};
|
||||
|
||||
const requirements: GSDRequirement[] = [
|
||||
{ id: 'R001', title: 'User Authentication', class: 'core-capability', status: 'validated', description: 'Users must authenticate.', source: 'stakeholder', primarySlice: 'S01' },
|
||||
{ id: 'R002', title: 'Dashboard View', class: 'core-capability', status: 'active', description: 'Dashboard shows data.', source: 'stakeholder', primarySlice: 'S02' },
|
||||
{ id: 'R003', title: 'Export to PDF', class: 'nice-to-have', status: 'deferred', description: 'PDF export.', source: 'inferred', primarySlice: 'none yet' },
|
||||
{ id: 'R004', title: 'Legacy Reports', class: 'deprecated', status: 'out-of-scope', description: 'Old reporting.', source: 'inferred', primarySlice: 'none yet' },
|
||||
];
|
||||
|
||||
return {
|
||||
milestones: [milestone],
|
||||
projectContent: '# My Project\n\nA test project for migration.\n',
|
||||
requirements,
|
||||
decisionsContent: '',
|
||||
};
|
||||
}
|
||||
|
||||
function buildCompleteProject(): GSDProject {
|
||||
const t01 = makeTask('T01', 'Only Task', true, true);
|
||||
const s01 = makeSlice('S01', 'Only Slice', true, [t01], [], true);
|
||||
|
||||
const milestone: GSDMilestone = {
|
||||
id: 'M001',
|
||||
title: 'Complete Milestone',
|
||||
vision: 'Everything done',
|
||||
successCriteria: ['All done'],
|
||||
slices: [s01],
|
||||
research: null,
|
||||
boundaryMap: [],
|
||||
};
|
||||
|
||||
return {
|
||||
milestones: [milestone],
|
||||
projectContent: '# Done Project\n',
|
||||
requirements: [],
|
||||
decisionsContent: '# Decisions\n\n| ID | Decision | Rationale | Date |\n',
|
||||
};
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Integration scenarios: build an in-memory GSDProject fixture, write it to a
 * temp directory via writeGSDDirectory(), then verify the resulting tree by
 * re-parsing key files and running deriveState()/generatePreview() against it.
 * Each scenario cleans up its temp directory in a finally block.
 */
async function main(): Promise<void> {

  // ─── Scenario 1: Incomplete project ────────────────────────────────────
  console.log('\n=== Scenario 1: Incomplete project — write, parse, deriveState ===');
  {
    const base = mkdtempSync(join(tmpdir(), 'gsd-writer-int-'));
    try {
      const project = buildIncompleteProject();
      const result = await writeGSDDirectory(project, base);

      // (a) Key files exist
      console.log('  --- file existence ---');
      const gsd = join(base, '.gsd');
      const m = join(gsd, 'milestones', 'M001');

      assert(existsSync(join(m, 'M001-ROADMAP.md')), 'incomplete: M001-ROADMAP.md exists');
      assert(existsSync(join(m, 'M001-CONTEXT.md')), 'incomplete: M001-CONTEXT.md exists');
      assert(existsSync(join(m, 'M001-RESEARCH.md')), 'incomplete: M001-RESEARCH.md exists');
      assert(existsSync(join(m, 'slices', 'S01', 'S01-PLAN.md')), 'incomplete: S01-PLAN.md exists');
      assert(existsSync(join(m, 'slices', 'S02', 'S02-PLAN.md')), 'incomplete: S02-PLAN.md exists');
      assert(existsSync(join(m, 'slices', 'S01', 'S01-SUMMARY.md')), 'incomplete: S01-SUMMARY.md exists');
      // S02 has a null summary, so no file may be emitted for it.
      assert(!existsSync(join(m, 'slices', 'S02', 'S02-SUMMARY.md')), 'incomplete: S02-SUMMARY.md NOT written (null)');
      assert(existsSync(join(gsd, 'REQUIREMENTS.md')), 'incomplete: REQUIREMENTS.md exists');
      assert(existsSync(join(gsd, 'PROJECT.md')), 'incomplete: PROJECT.md exists');
      assert(existsSync(join(gsd, 'DECISIONS.md')), 'incomplete: DECISIONS.md exists');
      assert(existsSync(join(gsd, 'STATE.md')), 'incomplete: STATE.md exists');

      // Task files
      assert(existsSync(join(m, 'slices', 'S01', 'tasks', 'T01-PLAN.md')), 'incomplete: T01-PLAN.md exists');
      assert(existsSync(join(m, 'slices', 'S01', 'tasks', 'T01-SUMMARY.md')), 'incomplete: T01-SUMMARY.md exists');
      assert(existsSync(join(m, 'slices', 'S01', 'tasks', 'T02-PLAN.md')), 'incomplete: T02-PLAN.md exists (auth task)');
      assert(existsSync(join(m, 'slices', 'S01', 'tasks', 'T02-SUMMARY.md')), 'incomplete: T02-SUMMARY.md exists (auth task)');
      assert(existsSync(join(m, 'slices', 'S02', 'tasks', 'T03-PLAN.md')), 'incomplete: T03-PLAN.md exists');
      assert(!existsSync(join(m, 'slices', 'S02', 'tasks', 'T03-SUMMARY.md')), 'incomplete: T03-SUMMARY.md NOT written (null)');

      // WrittenFiles counts — must match the fixture exactly:
      // 1 milestone roadmap, 2 slice plans, 3 task plans, 2 task summaries
      // (T03 has none), 1 slice summary (S02 has none).
      console.log('  --- WrittenFiles counts ---');
      assertEq(result.counts.roadmaps, 1, 'incomplete: WrittenFiles roadmaps count');
      assertEq(result.counts.plans, 2, 'incomplete: WrittenFiles plans count');
      assertEq(result.counts.taskPlans, 3, 'incomplete: WrittenFiles taskPlans count');
      assertEq(result.counts.taskSummaries, 2, 'incomplete: WrittenFiles taskSummaries count');
      assertEq(result.counts.sliceSummaries, 1, 'incomplete: WrittenFiles sliceSummaries count');
      assertEq(result.counts.research, 1, 'incomplete: WrittenFiles research count');
      assertEq(result.counts.requirements, 1, 'incomplete: WrittenFiles requirements count');
      assertEq(result.counts.contexts, 1, 'incomplete: WrittenFiles contexts count');

      // (b) parseRoadmap on written roadmap — round-trip through the real parser.
      console.log('  --- parseRoadmap ---');
      const roadmapContent = readFileSync(join(m, 'M001-ROADMAP.md'), 'utf-8');
      const roadmap = parseRoadmap(roadmapContent);
      assertEq(roadmap.slices.length, 2, 'incomplete: roadmap has 2 slices');
      assert(roadmap.slices[0].done === true, 'incomplete: roadmap S01 is done');
      assert(roadmap.slices[1].done === false, 'incomplete: roadmap S02 is not done');
      assertEq(roadmap.slices[0].id, 'S01', 'incomplete: roadmap slice 0 id');
      assertEq(roadmap.slices[1].id, 'S02', 'incomplete: roadmap slice 1 id');

      // (c) parsePlan on S01 plan
      console.log('  --- parsePlan S01 ---');
      const s01PlanContent = readFileSync(join(m, 'slices', 'S01', 'S01-PLAN.md'), 'utf-8');
      const s01Plan = parsePlan(s01PlanContent);
      assertEq(s01Plan.tasks.length, 2, 'incomplete: S01 plan has 2 tasks');
      assert(s01Plan.tasks[0].done === true, 'incomplete: S01 T01 is done');
      assert(s01Plan.tasks[1].done === true, 'incomplete: S01 T02 is done');

      // (d) parseSummary on S01 summary
      console.log('  --- parseSummary S01 ---');
      const s01SummaryContent = readFileSync(join(m, 'slices', 'S01', 'S01-SUMMARY.md'), 'utf-8');
      const s01Summary = parseSummary(s01SummaryContent);
      // Frontmatter values are typed loosely by the parser, hence the casts.
      assert(
        (s01Summary.frontmatter.key_files as string[]).length > 0,
        'incomplete: S01 summary has key_files',
      );
      assert(
        (s01Summary.frontmatter.provides as string[]).length > 0,
        'incomplete: S01 summary has provides',
      );

      // (e) deriveState — the written tree must read back as a coherent
      // in-progress project with S02/T03 as the active work items.
      console.log('  --- deriveState ---');
      const state = await deriveState(base);
      assertEq(state.phase, 'executing', 'incomplete: deriveState phase is executing');
      assert(state.activeMilestone !== null, 'incomplete: deriveState has activeMilestone');
      assertEq(state.activeMilestone!.id, 'M001', 'incomplete: deriveState activeMilestone is M001');
      assert(state.activeSlice !== null, 'incomplete: deriveState has activeSlice');
      assertEq(state.activeSlice!.id, 'S02', 'incomplete: deriveState activeSlice is S02');
      assert(state.activeTask !== null, 'incomplete: deriveState has activeTask');
      assertEq(state.activeTask!.id, 'T03', 'incomplete: deriveState activeTask is T03');
      assert(state.progress.slices !== undefined, 'incomplete: deriveState has slices progress');
      assertEq(state.progress.slices!.done, 1, 'incomplete: deriveState slices done count');
      assertEq(state.progress.slices!.total, 2, 'incomplete: deriveState slices total count');
      assert(state.progress.tasks !== undefined, 'incomplete: deriveState has tasks progress');
      // S02 has 1 task, 0 done (only active slice tasks counted)
      assertEq(state.progress.tasks!.done, 0, 'incomplete: deriveState tasks done (in active slice)');
      assertEq(state.progress.tasks!.total, 1, 'incomplete: deriveState tasks total (in active slice)');
      // Requirements — one per status bucket in the fixture.
      assertEq(state.requirements.active, 1, 'incomplete: deriveState requirements active');
      assertEq(state.requirements.validated, 1, 'incomplete: deriveState requirements validated');
      assertEq(state.requirements.deferred, 1, 'incomplete: deriveState requirements deferred');
      assertEq(state.requirements.outOfScope, 1, 'incomplete: deriveState requirements outOfScope');

      // (f) generatePreview — counts and completion percentages (67 = round(2/3 · 100)).
      console.log('  --- generatePreview ---');
      const preview = generatePreview(project);
      assertEq(preview.milestoneCount, 1, 'incomplete: preview milestoneCount');
      assertEq(preview.totalSlices, 2, 'incomplete: preview totalSlices');
      assertEq(preview.totalTasks, 3, 'incomplete: preview totalTasks');
      assertEq(preview.doneSlices, 1, 'incomplete: preview doneSlices');
      assertEq(preview.doneTasks, 2, 'incomplete: preview doneTasks');
      assertEq(preview.sliceCompletionPct, 50, 'incomplete: preview sliceCompletionPct');
      assertEq(preview.taskCompletionPct, 67, 'incomplete: preview taskCompletionPct');
      assertEq(preview.requirements.active, 1, 'incomplete: preview requirements active');
      assertEq(preview.requirements.validated, 1, 'incomplete: preview requirements validated');
      assertEq(preview.requirements.deferred, 1, 'incomplete: preview requirements deferred');
      assertEq(preview.requirements.outOfScope, 1, 'incomplete: preview requirements outOfScope');
      assertEq(preview.requirements.total, 4, 'incomplete: preview requirements total');

    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Scenario 2: Fully complete project ────────────────────────────────
  console.log('\n=== Scenario 2: Fully complete project — deriveState phase ===');
  {
    const base = mkdtempSync(join(tmpdir(), 'gsd-writer-int-complete-'));
    try {
      const project = buildCompleteProject();
      await writeGSDDirectory(project, base);

      // Null research should NOT produce a file
      const m = join(base, '.gsd', 'milestones', 'M001');
      assert(!existsSync(join(m, 'M001-RESEARCH.md')), 'complete: M001-RESEARCH.md NOT written (null)');
      // No REQUIREMENTS.md since empty requirements
      assert(!existsSync(join(base, '.gsd', 'REQUIREMENTS.md')), 'complete: REQUIREMENTS.md NOT written (empty)');

      // deriveState: all slices done, all tasks done — needs milestone summary for 'complete'
      // Without milestone summary, it should be 'completing-milestone' or 'summarizing'
      const state = await deriveState(base);
      // All slices are done in roadmap. Milestone summary doesn't exist.
      // deriveState should return 'completing-milestone' since all slices done but no milestone summary.
      assertEq(state.phase, 'completing-milestone', 'complete: deriveState phase is completing-milestone');
      assert(state.activeMilestone !== null, 'complete: deriveState has activeMilestone');
      assertEq(state.activeMilestone!.id, 'M001', 'complete: deriveState activeMilestone is M001');

      // generatePreview for complete project — everything at 100%.
      const preview = generatePreview(project);
      assertEq(preview.milestoneCount, 1, 'complete: preview milestoneCount');
      assertEq(preview.totalSlices, 1, 'complete: preview totalSlices');
      assertEq(preview.doneSlices, 1, 'complete: preview doneSlices');
      assertEq(preview.totalTasks, 1, 'complete: preview totalTasks');
      assertEq(preview.doneTasks, 1, 'complete: preview doneTasks');
      assertEq(preview.sliceCompletionPct, 100, 'complete: preview sliceCompletionPct');
      assertEq(preview.taskCompletionPct, 100, 'complete: preview taskCompletionPct');
      assertEq(preview.requirements.total, 0, 'complete: preview requirements total');

    } finally {
      rmSync(base, { recursive: true, force: true });
    }
  }

  // ─── Results ─────────────────────────────────────────────────────────────
  console.log(`\n${passed + failed} assertions: ${passed} passed, ${failed} failed`);
  if (failed > 0) process.exit(1);
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error('Unhandled error:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
420
src/resources/extensions/gsd/tests/migrate-writer.test.ts
Normal file
|
|
@ -0,0 +1,420 @@
|
|||
// Migration writer format round-trip test suite
|
||||
// Tests that format functions produce output that parses back correctly
|
||||
// through parseRoadmap(), parsePlan(), parseSummary(), and parseRequirementCounts().
|
||||
// Pure in-memory tests — no filesystem needed.
|
||||
|
||||
import {
|
||||
formatRoadmap,
|
||||
formatPlan,
|
||||
formatSliceSummary,
|
||||
formatTaskSummary,
|
||||
formatTaskPlan,
|
||||
formatRequirements,
|
||||
formatProject,
|
||||
formatDecisions,
|
||||
formatContext,
|
||||
formatState,
|
||||
} from '../migrate/writer.ts';
|
||||
import {
|
||||
parseRoadmap,
|
||||
parsePlan,
|
||||
parseSummary,
|
||||
parseRequirementCounts,
|
||||
} from '../files.ts';
|
||||
import type {
|
||||
GSDMilestone,
|
||||
GSDSlice,
|
||||
GSDTask,
|
||||
GSDRequirement,
|
||||
GSDSliceSummaryData,
|
||||
GSDTaskSummaryData,
|
||||
} from '../migrate/types.ts';
|
||||
|
||||
// Running tallies maintained by the ad-hoc assert/assertEq helpers below.
let passed = 0;
let failed = 0;
|
||||
|
||||
function assert(condition: boolean, message: string): void {
|
||||
if (condition) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(`FAIL: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function assertEq(actual: unknown, expected: unknown, message: string): void {
|
||||
const a = JSON.stringify(actual);
|
||||
const e = JSON.stringify(expected);
|
||||
if (a === e) {
|
||||
passed++;
|
||||
} else {
|
||||
failed++;
|
||||
console.error(`FAIL: ${message} — expected ${e}, got ${a}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Test Data Builders ────────────────────────────────────────────────────
|
||||
|
||||
function makeTask(overrides: Partial<GSDTask> = {}): GSDTask {
|
||||
return {
|
||||
id: 'T01',
|
||||
title: 'Setup Auth',
|
||||
description: 'Implement authentication',
|
||||
done: false,
|
||||
estimate: '30m',
|
||||
files: ['src/auth.ts'],
|
||||
mustHaves: ['JWT support'],
|
||||
summary: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeSlice(overrides: Partial<GSDSlice> = {}): GSDSlice {
|
||||
return {
|
||||
id: 'S01',
|
||||
title: 'Auth System',
|
||||
risk: 'medium' as const,
|
||||
depends: [],
|
||||
done: false,
|
||||
demo: 'Login flow works end-to-end',
|
||||
goal: 'Working authentication',
|
||||
tasks: [makeTask()],
|
||||
research: null,
|
||||
summary: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeMilestone(overrides: Partial<GSDMilestone> = {}): GSDMilestone {
|
||||
return {
|
||||
id: 'M001',
|
||||
title: 'Core Platform',
|
||||
vision: 'Build the core platform',
|
||||
successCriteria: ['All tests pass', 'Deploy to staging'],
|
||||
slices: [makeSlice()],
|
||||
research: null,
|
||||
boundaryMap: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeSliceSummary(overrides: Partial<GSDSliceSummaryData> = {}): GSDSliceSummaryData {
|
||||
return {
|
||||
completedAt: '2026-03-10',
|
||||
provides: ['auth-flow', 'jwt-tokens'],
|
||||
keyFiles: ['src/auth.ts', 'src/middleware.ts'],
|
||||
keyDecisions: ['Use JWT over sessions'],
|
||||
patternsEstablished: ['Middleware pattern'],
|
||||
duration: '2h',
|
||||
whatHappened: 'Implemented full auth system with JWT.',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeTaskSummary(overrides: Partial<GSDTaskSummaryData> = {}): GSDTaskSummaryData {
|
||||
return {
|
||||
completedAt: '2026-03-09',
|
||||
provides: ['auth-endpoint'],
|
||||
keyFiles: ['src/auth.ts'],
|
||||
duration: '45m',
|
||||
whatHappened: 'Built the auth endpoint.',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// Scenario A: Roadmap round-trip with 2 slices (1 done, 1 not)
// ═══════════════════════════════════════════════════════════════════════════

{
  const milestone = makeMilestone({
    slices: [
      makeSlice({
        id: 'S01',
        title: 'Auth System',
        risk: 'high',
        depends: [],
        done: true,
        demo: 'Login flow works',
      }),
      makeSlice({
        id: 'S02',
        title: 'Dashboard',
        risk: 'low',
        depends: ['S01'],
        done: false,
        demo: 'Dashboard renders data',
      }),
    ],
  });

  // format → parse must preserve every roadmap field exactly (round-trip).
  const output = formatRoadmap(milestone);
  const parsed = parseRoadmap(output);

  assertEq(parsed.title, 'M001: Core Platform', 'roadmap: title');
  assertEq(parsed.vision, 'Build the core platform', 'roadmap: vision');
  assertEq(parsed.successCriteria.length, 2, 'roadmap: successCriteria count');
  assertEq(parsed.successCriteria[0], 'All tests pass', 'roadmap: successCriteria[0]');
  assertEq(parsed.successCriteria[1], 'Deploy to staging', 'roadmap: successCriteria[1]');
  assertEq(parsed.slices.length, 2, 'roadmap: slices count');

  // S01: done, high risk, no dependencies.
  assertEq(parsed.slices[0].id, 'S01', 'roadmap: S01 id');
  assertEq(parsed.slices[0].title, 'Auth System', 'roadmap: S01 title');
  assertEq(parsed.slices[0].done, true, 'roadmap: S01 done');
  assertEq(parsed.slices[0].risk, 'high', 'roadmap: S01 risk');
  assertEq(parsed.slices[0].depends.length, 0, 'roadmap: S01 depends empty');
  assertEq(parsed.slices[0].demo, 'Login flow works', 'roadmap: S01 demo');

  // S02: pending, low risk, depends on S01.
  assertEq(parsed.slices[1].id, 'S02', 'roadmap: S02 id');
  assertEq(parsed.slices[1].title, 'Dashboard', 'roadmap: S02 title');
  assertEq(parsed.slices[1].done, false, 'roadmap: S02 done');
  assertEq(parsed.slices[1].risk, 'low', 'roadmap: S02 risk');
  assertEq(parsed.slices[1].depends, ['S01'], 'roadmap: S02 depends');
  assertEq(parsed.slices[1].demo, 'Dashboard renders data', 'roadmap: S02 demo');

  assertEq(parsed.boundaryMap.length, 0, 'roadmap: boundaryMap empty');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// Scenario B: Plan round-trip with 3 tasks (mixed done)
// ═══════════════════════════════════════════════════════════════════════════

{
  const slice = makeSlice({
    id: 'S01',
    title: 'Auth System',
    goal: 'Working authentication system',
    demo: 'Login works with valid credentials',
    tasks: [
      makeTask({ id: 'T01', title: 'Setup Models', done: true, estimate: '15m', description: 'Define user model' }),
      makeTask({ id: 'T02', title: 'Build Endpoints', done: false, estimate: '30m', description: 'REST API endpoints' }),
      makeTask({ id: 'T03', title: 'Write Tests', done: true, estimate: '20m', description: 'Unit and integration tests' }),
    ],
  });

  // Plan round-trip: task ids, titles, done flags, and estimates must all survive.
  const output = formatPlan(slice);
  const parsed = parsePlan(output);

  assertEq(parsed.id, 'S01', 'plan: id');
  assertEq(parsed.title, 'Auth System', 'plan: title');
  assertEq(parsed.goal, 'Working authentication system', 'plan: goal');
  assertEq(parsed.demo, 'Login works with valid credentials', 'plan: demo');
  assertEq(parsed.tasks.length, 3, 'plan: tasks count');

  assertEq(parsed.tasks[0].id, 'T01', 'plan: T01 id');
  assertEq(parsed.tasks[0].title, 'Setup Models', 'plan: T01 title');
  assertEq(parsed.tasks[0].done, true, 'plan: T01 done');
  assertEq(parsed.tasks[0].estimate, '15m', 'plan: T01 estimate');

  assertEq(parsed.tasks[1].id, 'T02', 'plan: T02 id');
  assertEq(parsed.tasks[1].done, false, 'plan: T02 done');
  assertEq(parsed.tasks[1].estimate, '30m', 'plan: T02 estimate');

  assertEq(parsed.tasks[2].id, 'T03', 'plan: T03 id');
  assertEq(parsed.tasks[2].done, true, 'plan: T03 done');
  assertEq(parsed.tasks[2].estimate, '20m', 'plan: T03 estimate');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// Scenario C: Slice summary round-trip with full data
// ═══════════════════════════════════════════════════════════════════════════

{
  const slice = makeSlice({
    id: 'S01',
    title: 'Auth System',
    done: true,
    summary: makeSliceSummary(),
  });

  const output = formatSliceSummary(slice, 'M001');
  const parsed = parseSummary(output);

  // Frontmatter fields: identity/links plus everything carried over from the
  // fixture summary (snake_case keys in the written file).
  assertEq(parsed.frontmatter.id, 'S01', 'sliceSummary: id');
  assertEq(parsed.frontmatter.parent, 'M001', 'sliceSummary: parent');
  assertEq(parsed.frontmatter.milestone, 'M001', 'sliceSummary: milestone');
  assertEq(parsed.frontmatter.provides, ['auth-flow', 'jwt-tokens'], 'sliceSummary: provides');
  assertEq(parsed.frontmatter.requires.length, 0, 'sliceSummary: requires empty');
  assertEq(parsed.frontmatter.affects.length, 0, 'sliceSummary: affects empty');
  assertEq(parsed.frontmatter.key_files, ['src/auth.ts', 'src/middleware.ts'], 'sliceSummary: key_files');
  assertEq(parsed.frontmatter.key_decisions, ['Use JWT over sessions'], 'sliceSummary: key_decisions');
  assertEq(parsed.frontmatter.patterns_established, ['Middleware pattern'], 'sliceSummary: patterns_established');
  assertEq(parsed.frontmatter.duration, '2h', 'sliceSummary: duration');
  assertEq(parsed.frontmatter.completed_at, '2026-03-10', 'sliceSummary: completed_at');
  // Migrated summaries get fixed defaults for fields v1 never tracked.
  assertEq(parsed.frontmatter.verification_result, 'passed', 'sliceSummary: verification_result');
  assertEq(parsed.frontmatter.blocker_discovered, false, 'sliceSummary: blocker_discovered');
  assert(parsed.whatHappened.includes('Implemented full auth system'), 'sliceSummary: whatHappened content');
  assertEq(parsed.title, 'S01: Auth System', 'sliceSummary: title');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// Scenario D: Task summary round-trip
// ═══════════════════════════════════════════════════════════════════════════

{
  const task = makeTask({
    id: 'T01',
    title: 'Setup Auth',
    done: true,
    summary: makeTaskSummary(),
  });

  // Task summaries carry both the parent slice and milestone ids.
  const output = formatTaskSummary(task, 'S01', 'M001');
  const parsed = parseSummary(output);

  assertEq(parsed.frontmatter.id, 'T01', 'taskSummary: id');
  assertEq(parsed.frontmatter.parent, 'S01', 'taskSummary: parent');
  assertEq(parsed.frontmatter.milestone, 'M001', 'taskSummary: milestone');
  assertEq(parsed.frontmatter.provides, ['auth-endpoint'], 'taskSummary: provides');
  assertEq(parsed.frontmatter.key_files, ['src/auth.ts'], 'taskSummary: key_files');
  assertEq(parsed.frontmatter.duration, '45m', 'taskSummary: duration');
  assertEq(parsed.frontmatter.completed_at, '2026-03-09', 'taskSummary: completed_at');
  assert(parsed.whatHappened.includes('Built the auth endpoint'), 'taskSummary: whatHappened content');
  assertEq(parsed.title, 'T01: Setup Auth', 'taskSummary: title');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
// Scenario E: Requirements round-trip with mixed statuses
// ═══════════════════════════════════════════════════════════════════════════

{
  // Five requirements spanning all four status buckets (two 'active').
  const requirements: GSDRequirement[] = [
    { id: 'R001', title: 'Auth Required', class: 'core-capability', status: 'active', description: 'Must have auth', source: 'spec', primarySlice: 'S01' },
    { id: 'R002', title: 'Logging', class: 'observability', status: 'active', description: 'Must log', source: 'spec', primarySlice: 'S02' },
    { id: 'R003', title: 'OAuth Support', class: 'core-capability', status: 'validated', description: 'OAuth working', source: 'testing', primarySlice: 'S01' },
    { id: 'R004', title: 'Dark Mode', class: 'ui', status: 'deferred', description: 'Nice to have', source: 'feedback', primarySlice: 'none' },
    { id: 'R005', title: 'Legacy API', class: 'compat', status: 'out-of-scope', description: 'Dropped', source: 'decision', primarySlice: 'none' },
  ];

  // parseRequirementCounts must tally the formatted file by status.
  const output = formatRequirements(requirements);
  const counts = parseRequirementCounts(output);

  assertEq(counts.active, 2, 'requirements: active count');
  assertEq(counts.validated, 1, 'requirements: validated count');
  assertEq(counts.deferred, 1, 'requirements: deferred count');
  assertEq(counts.outOfScope, 1, 'requirements: outOfScope count');
  assertEq(counts.total, 5, 'requirements: total count');
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Scenario F: Edge cases
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
// F1: Empty vision → fallback text
|
||||
{
|
||||
const milestone = makeMilestone({ vision: '' });
|
||||
const output = formatRoadmap(milestone);
|
||||
const parsed = parseRoadmap(output);
|
||||
assertEq(parsed.vision, '(migrated project)', 'edge: empty vision fallback');
|
||||
}
|
||||
|
||||
// F2: Empty successCriteria → empty array
|
||||
{
|
||||
const milestone = makeMilestone({ successCriteria: [] });
|
||||
const output = formatRoadmap(milestone);
|
||||
const parsed = parseRoadmap(output);
|
||||
assertEq(parsed.successCriteria.length, 0, 'edge: empty successCriteria');
|
||||
}
|
||||
|
||||
// F3: Empty tasks → empty array in parsed plan
|
||||
{
|
||||
const slice = makeSlice({ tasks: [] });
|
||||
const output = formatPlan(slice);
|
||||
const parsed = parsePlan(output);
|
||||
assertEq(parsed.tasks.length, 0, 'edge: empty tasks');
|
||||
}
|
||||
|
||||
// F4: Null summary → empty string from formatSliceSummary
|
||||
{
|
||||
const slice = makeSlice({ summary: null });
|
||||
const output = formatSliceSummary(slice, 'M001');
|
||||
assertEq(output, '', 'edge: null summary returns empty string');
|
||||
}
|
||||
|
||||
// F5: Done=true checkbox in roadmap
|
||||
{
|
||||
const milestone = makeMilestone({
|
||||
slices: [makeSlice({ id: 'S01', done: true })],
|
||||
});
|
||||
const output = formatRoadmap(milestone);
|
||||
const parsed = parseRoadmap(output);
|
||||
assertEq(parsed.slices[0].done, true, 'edge: done checkbox true');
|
||||
}
|
||||
|
||||
// F6: Done=false checkbox in roadmap
|
||||
{
|
||||
const milestone = makeMilestone({
|
||||
slices: [makeSlice({ id: 'S01', done: false })],
|
||||
});
|
||||
const output = formatRoadmap(milestone);
|
||||
const parsed = parseRoadmap(output);
|
||||
assertEq(parsed.slices[0].done, false, 'edge: done checkbox false');
|
||||
}
|
||||
|
||||
// F7: Null task summary → empty string from formatTaskSummary
|
||||
{
|
||||
const task = makeTask({ summary: null });
|
||||
const output = formatTaskSummary(task, 'S01', 'M001');
|
||||
assertEq(output, '', 'edge: null task summary returns empty string');
|
||||
}
|
||||
|
||||
// F8: Empty requirements → all zeros
|
||||
{
|
||||
const output = formatRequirements([]);
|
||||
const counts = parseRequirementCounts(output);
|
||||
assertEq(counts.total, 0, 'edge: empty requirements total 0');
|
||||
}
|
||||
|
||||
// F9: formatProject with empty content → produces valid stub
|
||||
{
|
||||
const output = formatProject('');
|
||||
assert(output.includes('# Project'), 'edge: empty project has heading');
|
||||
assert(output.length > 10, 'edge: empty project not blank');
|
||||
}
|
||||
|
||||
// F10: formatProject with existing content → passes through
|
||||
{
|
||||
const content = '# My Project\n\nDescription here.\n';
|
||||
const output = formatProject(content);
|
||||
assertEq(output, content, 'edge: project passthrough');
|
||||
}
|
||||
|
||||
// F11: formatDecisions with empty content → produces valid stub
|
||||
{
|
||||
const output = formatDecisions('');
|
||||
assert(output.includes('# Decisions'), 'edge: empty decisions has heading');
|
||||
}
|
||||
|
||||
// F12: formatContext produces valid content
|
||||
{
|
||||
const output = formatContext('M001');
|
||||
assert(output.includes('M001'), 'edge: context mentions milestone');
|
||||
}
|
||||
|
||||
// F13: formatState produces valid content
|
||||
{
|
||||
const milestones = [makeMilestone({
|
||||
slices: [
|
||||
makeSlice({ done: true }),
|
||||
makeSlice({ id: 'S02', done: false }),
|
||||
],
|
||||
})];
|
||||
const output = formatState(milestones);
|
||||
assert(output.includes('1/2'), 'edge: state shows slice progress');
|
||||
}
|
||||
|
||||
// F14: Task with no estimate → no est backtick in plan
|
||||
{
|
||||
const slice = makeSlice({
|
||||
tasks: [makeTask({ id: 'T01', title: 'Quick Fix', estimate: '' })],
|
||||
});
|
||||
const output = formatPlan(slice);
|
||||
const parsed = parsePlan(output);
|
||||
assertEq(parsed.tasks[0].id, 'T01', 'edge: task no estimate id');
|
||||
assertEq(parsed.tasks[0].estimate, '', 'edge: task no estimate empty');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log(`\nResults: ${passed} passed, ${failed} failed`);
|
||||
if (failed > 0) process.exit(1);
|
||||
Loading…
Add table
Reference in a new issue