PR #2280: Tool-driven planning state capture - DB-backed planning tools (gsd_plan_milestone, gsd_plan_slice, gsd_plan_task, gsd_replan_slice, gsd_reassess_roadmap, gsd_complete_task, gsd_complete_slice) - Schema v8-v10 with planning columns on milestones, slices, tasks tables - Markdown renderer generates files from DB (DB is source of truth) - Structural enforcement: rejects mutations to completed work - Rogue file detection for planning unit types Additional fixes included: - fix(gsd): remove stale observability validator (false-positive warnings) - fix(gsd): greenfield worktree check (warning instead of hard stop) - fix(gsd): prevent planning data loss from destructive INSERT OR REPLACE (#2370) - fix(gsd): remove post-unit migrateFromMarkdown hook (was overwriting DB data) - fix(gsd): remove vestigial prompt instructions conflicting with tool workflow - fix(gsd): remove Steps section duplication in task plan renderer Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
commit
976b7459e2
85 changed files with 6146 additions and 2668 deletions
|
|
@ -482,7 +482,6 @@
|
|||
| gsd/auto-loop.ts | Auto Engine, State Machine | Execution loop state and cycle management |
|
||||
| gsd/auto-supervisor.ts | Auto Engine | Supervision and oversight of autonomous runs |
|
||||
| gsd/auto-budget.ts | Auto Engine | Token/cost budgeting and tracking |
|
||||
| gsd/auto-observability.ts | Auto Engine | Observability hooks and telemetry |
|
||||
| gsd/auto-tool-tracking.ts | Auto Engine | Tool usage instrumentation |
|
||||
| gsd/doctor.ts | Doctor/Diagnostics | Health check and system diagnostics |
|
||||
| gsd/doctor-checks.ts | Doctor/Diagnostics | Individual diagnostic checks |
|
||||
|
|
@ -978,7 +977,7 @@ Quick lookup: which files are part of each system?
|
|||
| **Config** | src/app-paths.ts, src/models-resolver.ts, src/remote-questions-config.ts, src/wizard.ts, core/defaults.ts, core/constants.ts, config.ts |
|
||||
| **Context7** | src/resources/extensions/context7/index.ts |
|
||||
| **Doctor / Diagnostics** | gsd/doctor*.ts, gsd/collision-diagnostics.ts, core/diagnostics.ts, web/lib/diagnostics-types.ts, web/app/api/doctor/*, forensics/* |
|
||||
| **Event System** | pi-coding-agent/src/core/event-bus.ts, gsd/auto-observability.ts |
|
||||
| **Event System** | pi-coding-agent/src/core/event-bus.ts |
|
||||
| **Extension Registry** | src/extension-discovery.ts, src/extension-registry.ts, src/bundled-extension-paths.ts |
|
||||
| **Extensions** | pi-coding-agent/src/core/extensions/*, src/resource-loader.ts |
|
||||
| **File Search** | native/crates/engine/src/grep.rs, glob.rs, fd.rs, fs_cache.rs, packages/native/src/grep/*, fd/*, core/tools/grep.ts, find.ts |
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-darwin-arm64",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD native engine binary for macOS ARM64",
|
||||
"os": [
|
||||
"darwin"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-darwin-x64",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD native engine binary for macOS Intel",
|
||||
"os": [
|
||||
"darwin"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-linux-arm64-gnu",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD native engine binary for Linux ARM64 (glibc)",
|
||||
"os": [
|
||||
"linux"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-linux-x64-gnu",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD native engine binary for Linux x64 (glibc)",
|
||||
"os": [
|
||||
"linux"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@gsd-build/engine-win32-x64-msvc",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD native engine binary for Windows x64 (MSVC)",
|
||||
"os": [
|
||||
"win32"
|
||||
|
|
|
|||
4
package-lock.json
generated
4
package-lock.json
generated
|
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "gsd-pi",
|
||||
"version": "2.40.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "gsd-pi",
|
||||
"version": "2.40.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"workspaces": [
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "gsd-pi",
|
||||
"version": "2.43.0",
|
||||
"version": "2.43.0-next.7",
|
||||
"description": "GSD — Get Shit Done coding agent",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@
|
|||
|
||||
import { existsSync, readdirSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { loadFile, parseRoadmap, parsePlan, parseSummary } from "../gsd/files.js";
|
||||
import { loadFile, parseSummary } from "../gsd/files.js";
|
||||
import { parseRoadmap, parsePlan } from "../gsd/parsers-legacy.js";
|
||||
import {
|
||||
resolveMilestoneFile,
|
||||
resolveSliceFile,
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ import {
|
|||
resolveMilestoneFile,
|
||||
resolveSliceFile,
|
||||
} from "./paths.js";
|
||||
import { parseRoadmap, parsePlan } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from "./gsd-db.js";
|
||||
import { readFileSync, writeFileSync, existsSync } from "node:fs";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { truncateToWidth, visibleWidth } from "@gsd/pi-tui";
|
||||
|
|
@ -248,24 +248,28 @@ let cachedSliceProgress: {
|
|||
|
||||
export function updateSliceProgressCache(base: string, mid: string, activeSid?: string): void {
|
||||
try {
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
if (!roadmapFile) return;
|
||||
const content = readFileSync(roadmapFile, "utf-8");
|
||||
const roadmap = parseRoadmap(content);
|
||||
// Normalize slices: prefer DB, fall back to parser
|
||||
type NormSlice = { id: string; done: boolean; title: string };
|
||||
let normSlices: NormSlice[];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(mid).map(s => ({ id: s.id, done: s.status === "complete", title: s.title }));
|
||||
} else {
|
||||
normSlices = [];
|
||||
}
|
||||
|
||||
let activeSliceTasks: { done: number; total: number } | null = null;
|
||||
let taskDetails: CachedTaskDetail[] | null = null;
|
||||
if (activeSid) {
|
||||
try {
|
||||
const planFile = resolveSliceFile(base, mid, activeSid, "PLAN");
|
||||
if (planFile && existsSync(planFile)) {
|
||||
const planContent = readFileSync(planFile, "utf-8");
|
||||
const plan = parsePlan(planContent);
|
||||
activeSliceTasks = {
|
||||
done: plan.tasks.filter(t => t.done).length,
|
||||
total: plan.tasks.length,
|
||||
};
|
||||
taskDetails = plan.tasks.map(t => ({ id: t.id, title: t.title, done: t.done }));
|
||||
if (isDbAvailable()) {
|
||||
const dbTasks = getSliceTasks(mid, activeSid);
|
||||
if (dbTasks.length > 0) {
|
||||
activeSliceTasks = {
|
||||
done: dbTasks.filter(t => t.status === "complete" || t.status === "done").length,
|
||||
total: dbTasks.length,
|
||||
};
|
||||
taskDetails = dbTasks.map(t => ({ id: t.id, title: t.title, done: t.status === "complete" || t.status === "done" }));
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — just omit task count
|
||||
|
|
@ -273,8 +277,8 @@ export function updateSliceProgressCache(base: string, mid: string, activeSid?:
|
|||
}
|
||||
|
||||
cachedSliceProgress = {
|
||||
done: roadmap.slices.filter(s => s.done).length,
|
||||
total: roadmap.slices.length,
|
||||
done: normSlices.filter(s => s.done).length,
|
||||
total: normSlices.length,
|
||||
milestoneId: mid,
|
||||
activeSliceTasks,
|
||||
taskDetails,
|
||||
|
|
|
|||
|
|
@ -9,7 +9,8 @@ import type {
|
|||
} from "@gsd/pi-coding-agent";
|
||||
|
||||
import { deriveState } from "./state.js";
|
||||
import { loadFile, parseRoadmap } from "./files.js";
|
||||
import { loadFile } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
|
||||
import {
|
||||
resolveMilestoneFile, resolveSliceFile, relSliceFile,
|
||||
} from "./paths.js";
|
||||
|
|
@ -151,19 +152,19 @@ export async function dispatchDirectPhase(
|
|||
|
||||
case "reassess":
|
||||
case "reassess-roadmap": {
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) {
|
||||
ctx.ui.notify("Cannot dispatch reassess-roadmap: no roadmap found.", "warning");
|
||||
// DB primary path — get completed slices
|
||||
let completedSliceIds: string[] = [];
|
||||
if (isDbAvailable()) {
|
||||
completedSliceIds = getMilestoneSlices(mid).filter(s => s.status === "complete").map(s => s.id);
|
||||
} else {
|
||||
ctx.ui.notify("Cannot dispatch reassess-roadmap: DB unavailable.", "warning");
|
||||
return;
|
||||
}
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const completedSlices = roadmap.slices.filter(s => s.done);
|
||||
if (completedSlices.length === 0) {
|
||||
if (completedSliceIds.length === 0) {
|
||||
ctx.ui.notify("Cannot dispatch reassess-roadmap: no completed slices.", "warning");
|
||||
return;
|
||||
}
|
||||
const completedSliceId = completedSlices[completedSlices.length - 1].id;
|
||||
const completedSliceId = completedSliceIds[completedSliceIds.length - 1];
|
||||
unitType = "reassess-roadmap";
|
||||
unitId = `${mid}/${completedSliceId}`;
|
||||
prompt = await buildReassessRoadmapPrompt(mid, midTitle, completedSliceId, base);
|
||||
|
|
@ -176,19 +177,18 @@ export async function dispatchDirectPhase(
|
|||
// incomplete) slice. After slice completion, state.activeSlice advances
|
||||
// to the next incomplete slice, so we find the last done slice from the
|
||||
// roadmap instead (#1693).
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) {
|
||||
ctx.ui.notify("Cannot dispatch run-uat: no roadmap found.", "warning");
|
||||
let uatCompletedSliceIds: string[] = [];
|
||||
if (isDbAvailable()) {
|
||||
uatCompletedSliceIds = getMilestoneSlices(mid).filter(s => s.status === "complete").map(s => s.id);
|
||||
} else {
|
||||
ctx.ui.notify("Cannot dispatch run-uat: DB unavailable.", "warning");
|
||||
return;
|
||||
}
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const completedSlices = roadmap.slices.filter(s => s.done);
|
||||
if (completedSlices.length === 0) {
|
||||
if (uatCompletedSliceIds.length === 0) {
|
||||
ctx.ui.notify("Cannot dispatch run-uat: no completed slices.", "warning");
|
||||
return;
|
||||
}
|
||||
const sid = completedSlices[completedSlices.length - 1].id;
|
||||
const sid = uatCompletedSliceIds[uatCompletedSliceIds.length - 1];
|
||||
const uatFile = resolveSliceFile(base, mid, sid, "UAT");
|
||||
if (!uatFile) {
|
||||
ctx.ui.notify("Cannot dispatch run-uat: no UAT file found.", "warning");
|
||||
|
|
|
|||
|
|
@ -12,7 +12,9 @@
|
|||
import type { GSDState } from "./types.js";
|
||||
import type { GSDPreferences } from "./preferences.js";
|
||||
import type { UatType } from "./files.js";
|
||||
import { loadFile, extractUatType, loadActiveOverrides, parseRoadmap } from "./files.js";
|
||||
import { loadFile, extractUatType, loadActiveOverrides } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
|
||||
|
||||
import {
|
||||
resolveMilestoneFile,
|
||||
resolveMilestonePath,
|
||||
|
|
@ -170,12 +172,19 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
if (!prefs?.uat_dispatch) return null;
|
||||
|
||||
const roadmapFile = resolveMilestoneFile(basePath, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) return null;
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
for (const slice of roadmap.slices.filter(s => s.done)) {
|
||||
const resultFile = resolveSliceFile(basePath, mid, slice.id, "UAT-RESULT");
|
||||
// DB-first: get completed slices from DB
|
||||
let completedSliceIds: string[];
|
||||
if (isDbAvailable()) {
|
||||
completedSliceIds = getMilestoneSlices(mid)
|
||||
.filter(s => s.status === "complete")
|
||||
.map(s => s.id);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const sliceId of completedSliceIds) {
|
||||
const resultFile = resolveSliceFile(basePath, mid, sliceId, "UAT-RESULT");
|
||||
if (!resultFile) continue;
|
||||
const content = await loadFile(resultFile);
|
||||
if (!content) continue;
|
||||
|
|
@ -184,7 +193,7 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
if (verdict && verdict !== "pass" && verdict !== "passed") {
|
||||
return {
|
||||
action: "stop" as const,
|
||||
reason: `UAT verdict for ${slice.id} is "${verdict}" — blocking progression until resolved.\nReview the UAT result and update the verdict to PASS, or re-run /gsd auto after fixing.`,
|
||||
reason: `UAT verdict for ${sliceId} is "${verdict}" — blocking progression until resolved.\nReview the UAT result and update the verdict to PASS, or re-run /gsd auto after fixing.`,
|
||||
level: "warning" as const,
|
||||
};
|
||||
}
|
||||
|
|
@ -501,15 +510,19 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
// Safety guard (#1368): verify all roadmap slices have SUMMARY files before
|
||||
// allowing milestone validation. If any slice lacks a summary, the milestone
|
||||
// is not genuinely complete — something skipped earlier slices.
|
||||
const roadmapFile = resolveMilestoneFile(basePath, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
let sliceIds: string[];
|
||||
if (isDbAvailable()) {
|
||||
sliceIds = getMilestoneSlices(mid).map(s => s.id);
|
||||
} else {
|
||||
sliceIds = [];
|
||||
}
|
||||
|
||||
if (sliceIds.length > 0) {
|
||||
const missingSlices: string[] = [];
|
||||
for (const slice of roadmap.slices) {
|
||||
const summaryPath = resolveSliceFile(basePath, mid, slice.id, "SUMMARY");
|
||||
for (const sid of sliceIds) {
|
||||
const summaryPath = resolveSliceFile(basePath, mid, sid, "SUMMARY");
|
||||
if (!summaryPath || !existsSync(summaryPath)) {
|
||||
missingSlices.push(slice.id);
|
||||
missingSlices.push(sid);
|
||||
}
|
||||
}
|
||||
if (missingSlices.length > 0) {
|
||||
|
|
@ -558,15 +571,19 @@ export const DISPATCH_RULES: DispatchRule[] = [
|
|||
if (state.phase !== "completing-milestone") return null;
|
||||
|
||||
// Safety guard (#1368): verify all roadmap slices have SUMMARY files.
|
||||
const roadmapFile = resolveMilestoneFile(basePath, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
let sliceIds: string[];
|
||||
if (isDbAvailable()) {
|
||||
sliceIds = getMilestoneSlices(mid).map(s => s.id);
|
||||
} else {
|
||||
sliceIds = [];
|
||||
}
|
||||
|
||||
if (sliceIds.length > 0) {
|
||||
const missingSlices: string[] = [];
|
||||
for (const slice of roadmap.slices) {
|
||||
const summaryPath = resolveSliceFile(basePath, mid, slice.id, "SUMMARY");
|
||||
for (const sid of sliceIds) {
|
||||
const summaryPath = resolveSliceFile(basePath, mid, sid, "SUMMARY");
|
||||
if (!summaryPath || !existsSync(summaryPath)) {
|
||||
missingSlices.push(slice.id);
|
||||
missingSlices.push(sid);
|
||||
}
|
||||
}
|
||||
if (missingSlices.length > 0) {
|
||||
|
|
|
|||
|
|
@ -1,74 +0,0 @@
|
|||
/**
|
||||
* Pre-dispatch observability checks for auto-mode units.
|
||||
* Validates plan/summary file quality and builds repair instructions
|
||||
* for the agent to fix gaps before proceeding with the unit.
|
||||
*/
|
||||
|
||||
import type { ExtensionContext } from "@gsd/pi-coding-agent";
|
||||
import {
|
||||
validatePlanBoundary,
|
||||
validateExecuteBoundary,
|
||||
validateCompleteBoundary,
|
||||
formatValidationIssues,
|
||||
} from "./observability-validator.js";
|
||||
import type { ValidationIssue } from "./observability-validator.js";
|
||||
|
||||
export async function collectObservabilityWarnings(
|
||||
ctx: ExtensionContext,
|
||||
basePath: string,
|
||||
unitType: string,
|
||||
unitId: string,
|
||||
): Promise<ValidationIssue[]> {
|
||||
// Hook units have custom artifacts — skip standard observability checks
|
||||
if (unitType.startsWith("hook/")) return [];
|
||||
|
||||
const parts = unitId.split("/");
|
||||
const mid = parts[0];
|
||||
const sid = parts[1];
|
||||
const tid = parts[2];
|
||||
|
||||
if (!mid || !sid) return [];
|
||||
|
||||
let issues = [] as Awaited<ReturnType<typeof validatePlanBoundary>>;
|
||||
|
||||
if (unitType === "plan-slice") {
|
||||
issues = await validatePlanBoundary(basePath, mid, sid);
|
||||
} else if (unitType === "execute-task" && tid) {
|
||||
issues = await validateExecuteBoundary(basePath, mid, sid, tid);
|
||||
} else if (unitType === "complete-slice") {
|
||||
issues = await validateCompleteBoundary(basePath, mid, sid);
|
||||
}
|
||||
|
||||
if (issues.length > 0) {
|
||||
ctx.ui.notify(
|
||||
`Observability check (${unitType}) found ${issues.length} warning${issues.length === 1 ? "" : "s"}:\n${formatValidationIssues(issues)}`,
|
||||
"warning",
|
||||
);
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export function buildObservabilityRepairBlock(issues: ValidationIssue[]): string {
|
||||
if (issues.length === 0) return "";
|
||||
const items = issues.map(issue => {
|
||||
const fileName = issue.file.split("/").pop() || issue.file;
|
||||
let line = `- **${fileName}**: ${issue.message}`;
|
||||
if (issue.suggestion) line += ` → ${issue.suggestion}`;
|
||||
return line;
|
||||
});
|
||||
return [
|
||||
"",
|
||||
"---",
|
||||
"",
|
||||
"## Pre-flight: Observability gaps to fix FIRST",
|
||||
"",
|
||||
"The following issues were detected in plan/summary files for this unit.",
|
||||
"**Read each flagged file, apply the fix described, then proceed with the unit.**",
|
||||
"",
|
||||
...items,
|
||||
"",
|
||||
"---",
|
||||
"",
|
||||
].join("\n");
|
||||
}
|
||||
|
|
@ -38,7 +38,7 @@ import { writeUnitRuntimeRecord, clearUnitRuntimeRecord } from "./unit-runtime.j
|
|||
import { runGSDDoctor, rebuildState, summarizeDoctorIssues } from "./doctor.js";
|
||||
import { recordHealthSnapshot, checkHealEscalation } from "./doctor-proactive.js";
|
||||
import { syncStateToProjectRoot } from "./auto-worktree-sync.js";
|
||||
import { isDbAvailable, getTask, getSlice, updateTaskStatus } from "./gsd-db.js";
|
||||
import { isDbAvailable, getTask, getSlice, getMilestone, updateTaskStatus } from "./gsd-db.js";
|
||||
import { renderPlanCheckboxes } from "./markdown-renderer.js";
|
||||
import { consumeSignal } from "./session-status-io.js";
|
||||
import {
|
||||
|
|
@ -111,6 +111,42 @@ export function detectRogueFileWrites(
|
|||
if (!dbRow || dbRow.status !== "complete") {
|
||||
rogues.push({ path: summaryPath, unitType, unitId });
|
||||
}
|
||||
} else if (unitType === "plan-milestone") {
|
||||
const [mid] = parts;
|
||||
if (!mid) return [];
|
||||
|
||||
const roadmapPath = resolveMilestoneFile(basePath, mid, "ROADMAP");
|
||||
if (!roadmapPath || !existsSync(roadmapPath)) return [];
|
||||
|
||||
const dbRow = getMilestone(mid);
|
||||
const hasPlanningState = !!dbRow && (
|
||||
String(dbRow.title || "").trim().length > 0 ||
|
||||
String(dbRow.vision || "").trim().length > 0 ||
|
||||
String(dbRow.requirement_coverage || "").trim().length > 0 ||
|
||||
String(dbRow.boundary_map_markdown || "").trim().length > 0
|
||||
);
|
||||
|
||||
if (!hasPlanningState) {
|
||||
rogues.push({ path: roadmapPath, unitType, unitId });
|
||||
}
|
||||
} else if (unitType === "plan-slice" || unitType === "replan-slice") {
|
||||
const [mid, sid] = parts;
|
||||
if (!mid || !sid) return [];
|
||||
|
||||
const planPath = resolveSliceFile(basePath, mid, sid, "PLAN");
|
||||
if (!planPath || !existsSync(planPath)) return [];
|
||||
|
||||
const dbRow = getSlice(mid, sid);
|
||||
const hasPlanningState = !!dbRow && (
|
||||
String(dbRow.title || "").trim().length > 0 ||
|
||||
String(dbRow.demo || "").trim().length > 0 ||
|
||||
String(dbRow.risk || "").trim().length > 0 ||
|
||||
String(dbRow.depends || "").trim().length > 0
|
||||
);
|
||||
|
||||
if (!hasPlanningState) {
|
||||
rogues.push({ path: planPath, unitType, unitId });
|
||||
}
|
||||
}
|
||||
|
||||
return rogues;
|
||||
|
|
@ -488,16 +524,6 @@ export async function postUnitPreVerification(pctx: PostUnitContext, opts?: PreV
|
|||
export async function postUnitPostVerification(pctx: PostUnitContext): Promise<"continue" | "step-wizard" | "stopped"> {
|
||||
const { s, ctx, pi, buildSnapshotOpts, lockBase, stopAuto, pauseAuto, updateProgressWidget } = pctx;
|
||||
|
||||
// ── DB dual-write ──
|
||||
if (isDbAvailable()) {
|
||||
try {
|
||||
const { migrateFromMarkdown } = await import("./md-importer.js");
|
||||
migrateFromMarkdown(s.basePath);
|
||||
} catch (err) {
|
||||
process.stderr.write(`gsd-db: re-import failed: ${(err as Error).message}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Post-unit hooks ──
|
||||
if (s.currentUnit && !s.stepMode) {
|
||||
const hookUnit = checkPostUnitHooks(s.currentUnit.type, s.currentUnit.id, s.basePath);
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
* utility.
|
||||
*/
|
||||
|
||||
import { loadFile, parseContinue, parsePlan, parseRoadmap, parseSummary, extractUatType, loadActiveOverrides, formatOverridesSection, parseTaskPlanFile } from "./files.js";
|
||||
import { loadFile, parseContinue, parseSummary, extractUatType, loadActiveOverrides, formatOverridesSection, parseTaskPlanFile } from "./files.js";
|
||||
import type { Override, UatType } from "./files.js";
|
||||
import { loadPrompt, inlineTemplate } from "./prompt-loader.js";
|
||||
import {
|
||||
|
|
@ -177,17 +177,25 @@ export async function inlineFileSmart(
|
|||
export async function inlineDependencySummaries(
|
||||
mid: string, sid: string, base: string, budgetChars?: number,
|
||||
): Promise<string> {
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) return "- (no dependencies)";
|
||||
// DB primary path — get slice depends directly
|
||||
let depends: string[] | null = null;
|
||||
try {
|
||||
const { isDbAvailable, getSlice } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
const slice = getSlice(mid, sid);
|
||||
if (!slice || slice.depends.length === 0) return "- (no dependencies)";
|
||||
depends = slice.depends as string[];
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const sliceEntry = roadmap.slices.find(s => s.id === sid);
|
||||
if (!sliceEntry || sliceEntry.depends.length === 0) return "- (no dependencies)";
|
||||
// If DB didn't provide depends, we can't determine them without parsers
|
||||
if (!depends) {
|
||||
return "- (no dependencies)";
|
||||
}
|
||||
|
||||
const sections: string[] = [];
|
||||
const seen = new Set<string>();
|
||||
for (const dep of sliceEntry.depends) {
|
||||
for (const dep of depends) {
|
||||
if (seen.has(dep)) continue;
|
||||
seen.add(dep);
|
||||
const summaryFile = resolveSliceFile(base, mid, dep, "SUMMARY");
|
||||
|
|
@ -676,31 +684,29 @@ export async function getDependencyTaskSummaryPaths(
|
|||
export async function checkNeedsReassessment(
|
||||
base: string, mid: string, state: GSDState,
|
||||
): Promise<{ sliceId: string } | null> {
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) return null;
|
||||
// DB primary path
|
||||
let completedSliceIds: string[] = [];
|
||||
let hasIncomplete = false;
|
||||
try {
|
||||
const { isDbAvailable, getMilestoneSlices } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
const slices = getMilestoneSlices(mid);
|
||||
completedSliceIds = slices.filter(s => s.status === "complete").map(s => s.id);
|
||||
hasIncomplete = slices.some(s => s.status !== "complete");
|
||||
if (completedSliceIds.length === 0 || !hasIncomplete) return null;
|
||||
const lastCompleted = completedSliceIds[completedSliceIds.length - 1];
|
||||
const assessmentFile = resolveSliceFile(base, mid, lastCompleted, "ASSESSMENT");
|
||||
const hasAssessment = !!(assessmentFile && await loadFile(assessmentFile));
|
||||
if (hasAssessment) return null;
|
||||
const summaryFile = resolveSliceFile(base, mid, lastCompleted, "SUMMARY");
|
||||
const hasSummary = !!(summaryFile && await loadFile(summaryFile));
|
||||
if (!hasSummary) return null;
|
||||
return { sliceId: lastCompleted };
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const completedSlices = roadmap.slices.filter(s => s.done);
|
||||
const incompleteSlices = roadmap.slices.filter(s => !s.done);
|
||||
|
||||
// No completed slices or all slices done — skip
|
||||
if (completedSlices.length === 0 || incompleteSlices.length === 0) return null;
|
||||
|
||||
// Check the last completed slice
|
||||
const lastCompleted = completedSlices[completedSlices.length - 1];
|
||||
const assessmentFile = resolveSliceFile(base, mid, lastCompleted.id, "ASSESSMENT");
|
||||
const hasAssessment = !!(assessmentFile && await loadFile(assessmentFile));
|
||||
|
||||
if (hasAssessment) return null;
|
||||
|
||||
// Also need a summary to reassess against
|
||||
const summaryFile = resolveSliceFile(base, mid, lastCompleted.id, "SUMMARY");
|
||||
const hasSummary = !!(summaryFile && await loadFile(summaryFile));
|
||||
|
||||
if (!hasSummary) return null;
|
||||
|
||||
return { sliceId: lastCompleted.id };
|
||||
// DB unavailable — cannot determine assessment needs
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -717,44 +723,34 @@ export async function checkNeedsReassessment(
|
|||
export async function checkNeedsRunUat(
|
||||
base: string, mid: string, state: GSDState, prefs: GSDPreferences | undefined,
|
||||
): Promise<{ sliceId: string; uatType: UatType } | null> {
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) return null;
|
||||
// DB primary path
|
||||
try {
|
||||
const { isDbAvailable, getMilestoneSlices } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
const slices = getMilestoneSlices(mid);
|
||||
const completedSlices = slices.filter(s => s.status === "complete");
|
||||
const incompleteSlices = slices.filter(s => s.status !== "complete");
|
||||
if (completedSlices.length === 0) return null;
|
||||
if (incompleteSlices.length === 0) return null;
|
||||
if (!prefs?.uat_dispatch) return null;
|
||||
const lastCompleted = completedSlices[completedSlices.length - 1];
|
||||
const sid = lastCompleted.id;
|
||||
const uatFile = resolveSliceFile(base, mid, sid, "UAT");
|
||||
if (!uatFile) return null;
|
||||
const uatContent = await loadFile(uatFile);
|
||||
if (!uatContent) return null;
|
||||
const uatResultFile = resolveSliceFile(base, mid, sid, "UAT-RESULT");
|
||||
if (uatResultFile) {
|
||||
const hasResult = !!(await loadFile(uatResultFile));
|
||||
if (hasResult) return null;
|
||||
}
|
||||
const uatType = extractUatType(uatContent) ?? "artifact-driven";
|
||||
return { sliceId: sid, uatType };
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const completedSlices = roadmap.slices.filter(s => s.done);
|
||||
const incompleteSlices = roadmap.slices.filter(s => !s.done);
|
||||
|
||||
// No completed slices — nothing to UAT yet
|
||||
if (completedSlices.length === 0) return null;
|
||||
|
||||
// All slices done — milestone complete path, skip (reassessment handles)
|
||||
if (incompleteSlices.length === 0) return null;
|
||||
|
||||
// uat_dispatch must be opted in
|
||||
if (!prefs?.uat_dispatch) return null;
|
||||
|
||||
// Take the last completed slice
|
||||
const lastCompleted = completedSlices[completedSlices.length - 1];
|
||||
const sid = lastCompleted.id;
|
||||
|
||||
// UAT file must exist
|
||||
const uatFile = resolveSliceFile(base, mid, sid, "UAT");
|
||||
if (!uatFile) return null;
|
||||
const uatContent = await loadFile(uatFile);
|
||||
if (!uatContent) return null;
|
||||
|
||||
// If UAT result already exists, skip (idempotent)
|
||||
const uatResultFile = resolveSliceFile(base, mid, sid, "UAT-RESULT");
|
||||
if (uatResultFile) {
|
||||
const hasResult = !!(await loadFile(uatResultFile));
|
||||
if (hasResult) return null;
|
||||
}
|
||||
|
||||
// Classify UAT type; default to artifact-driven (LLM-executed UATs are always artifact-driven)
|
||||
const uatType = extractUatType(uatContent) ?? "artifact-driven";
|
||||
|
||||
return { sliceId: sid, uatType };
|
||||
// DB unavailable — cannot determine UAT needs
|
||||
return null;
|
||||
}
|
||||
|
||||
// ─── Prompt Builders ──────────────────────────────────────────────────────
|
||||
|
|
@ -1204,17 +1200,21 @@ export async function buildCompleteMilestonePrompt(
|
|||
inlined.push(await inlineFile(roadmapPath, roadmapRel, "Milestone Roadmap"));
|
||||
|
||||
// Inline all slice summaries (deduplicated by slice ID)
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const seenSlices = new Set<string>();
|
||||
for (const slice of roadmap.slices) {
|
||||
if (seenSlices.has(slice.id)) continue;
|
||||
seenSlices.add(slice.id);
|
||||
const summaryPath = resolveSliceFile(base, mid, slice.id, "SUMMARY");
|
||||
const summaryRel = relSliceFile(base, mid, slice.id, "SUMMARY");
|
||||
inlined.push(await inlineFile(summaryPath, summaryRel, `${slice.id} Summary`));
|
||||
let sliceIds: string[] = [];
|
||||
try {
|
||||
const { isDbAvailable, getMilestoneSlices } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
sliceIds = getMilestoneSlices(mid).map(s => s.id);
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
// If DB didn't provide slice IDs, sliceIds stays empty — no summaries to inline
|
||||
const seenSlices = new Set<string>();
|
||||
for (const sid of sliceIds) {
|
||||
if (seenSlices.has(sid)) continue;
|
||||
seenSlices.add(sid);
|
||||
const summaryPath = resolveSliceFile(base, mid, sid, "SUMMARY");
|
||||
const summaryRel = relSliceFile(base, mid, sid, "SUMMARY");
|
||||
inlined.push(await inlineFile(summaryPath, summaryRel, `${sid} Summary`));
|
||||
}
|
||||
|
||||
// Inline root GSD files (skip for minimal — completion can read these if needed)
|
||||
|
|
@ -1260,22 +1260,26 @@ export async function buildValidateMilestonePrompt(
|
|||
inlined.push(await inlineFile(roadmapPath, roadmapRel, "Milestone Roadmap"));
|
||||
|
||||
// Inline all slice summaries and UAT results
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const seenSlices = new Set<string>();
|
||||
for (const slice of roadmap.slices) {
|
||||
if (seenSlices.has(slice.id)) continue;
|
||||
seenSlices.add(slice.id);
|
||||
const summaryPath = resolveSliceFile(base, mid, slice.id, "SUMMARY");
|
||||
const summaryRel = relSliceFile(base, mid, slice.id, "SUMMARY");
|
||||
inlined.push(await inlineFile(summaryPath, summaryRel, `${slice.id} Summary`));
|
||||
|
||||
const uatPath = resolveSliceFile(base, mid, slice.id, "UAT-RESULT");
|
||||
const uatRel = relSliceFile(base, mid, slice.id, "UAT-RESULT");
|
||||
const uatInline = await inlineFileOptional(uatPath, uatRel, `${slice.id} UAT Result`);
|
||||
if (uatInline) inlined.push(uatInline);
|
||||
let valSliceIds: string[] = [];
|
||||
try {
|
||||
const { isDbAvailable, getMilestoneSlices } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
valSliceIds = getMilestoneSlices(mid).map(s => s.id);
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
// If DB didn't provide slice IDs, valSliceIds stays empty
|
||||
const seenValSlices = new Set<string>();
|
||||
for (const sid of valSliceIds) {
|
||||
if (seenValSlices.has(sid)) continue;
|
||||
seenValSlices.add(sid);
|
||||
const summaryPath = resolveSliceFile(base, mid, sid, "SUMMARY");
|
||||
const summaryRel = relSliceFile(base, mid, sid, "SUMMARY");
|
||||
inlined.push(await inlineFile(summaryPath, summaryRel, `${sid} Summary`));
|
||||
|
||||
const uatPath = resolveSliceFile(base, mid, sid, "UAT-RESULT");
|
||||
const uatRel = relSliceFile(base, mid, sid, "UAT-RESULT");
|
||||
const uatInline = await inlineFileOptional(uatPath, uatRel, `${sid} UAT Result`);
|
||||
if (uatInline) inlined.push(uatInline);
|
||||
}
|
||||
|
||||
// Inline existing VALIDATION file if this is a re-validation round
|
||||
|
|
@ -1582,16 +1586,28 @@ export async function buildRewriteDocsPrompt(
|
|||
docList.push(`- Slice plan: \`${slicePlanRel}\``);
|
||||
const tDir = resolveTasksDir(base, mid, sid);
|
||||
if (tDir) {
|
||||
const planContent = await loadFile(slicePlanPath);
|
||||
if (planContent) {
|
||||
const plan = parsePlan(planContent);
|
||||
for (const task of plan.tasks) {
|
||||
if (!task.done) {
|
||||
const taskPlanPath = resolveTaskFile(base, mid, sid, task.id, "PLAN");
|
||||
if (taskPlanPath) {
|
||||
const taskRelPath = `${relSlicePath(base, mid, sid)}/tasks/${task.id}-PLAN.md`;
|
||||
docList.push(`- Task plan: \`${taskRelPath}\``);
|
||||
}
|
||||
// DB primary path — get incomplete tasks
|
||||
let incompleteTasks: { id: string }[] | null = null;
|
||||
try {
|
||||
const { isDbAvailable, getSliceTasks } = await import("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
incompleteTasks = getSliceTasks(mid, sid)
|
||||
.filter(t => t.status !== "complete" && t.status !== "done")
|
||||
.map(t => ({ id: t.id }));
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
|
||||
if (!incompleteTasks) {
|
||||
// DB unavailable — no task data to inline
|
||||
incompleteTasks = [];
|
||||
}
|
||||
|
||||
if (incompleteTasks) {
|
||||
for (const task of incompleteTasks) {
|
||||
const taskPlanPath = resolveTaskFile(base, mid, sid, task.id, "PLAN");
|
||||
if (taskPlanPath) {
|
||||
const taskRelPath = `${relSlicePath(base, mid, sid)}/tasks/${task.id}-PLAN.md`;
|
||||
docList.push(`- Task plan: \`${taskRelPath}\``);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,8 +11,9 @@ import type { ExtensionContext } from "@gsd/pi-coding-agent";
|
|||
import { parseUnitId } from "./unit-id.js";
|
||||
import { atomicWriteSync } from "./atomic-write.js";
|
||||
import { clearUnitRuntimeRecord } from "./unit-runtime.js";
|
||||
import { clearParseCache, parseRoadmap, parsePlan } from "./files.js";
|
||||
import { isDbAvailable, getTask, getSlice } from "./gsd-db.js";
|
||||
import { clearParseCache } from "./files.js";
|
||||
import { parseRoadmap as parseLegacyRoadmap, parsePlan as parseLegacyPlan } from "./parsers-legacy.js";
|
||||
import { isDbAvailable, getTask, getSlice, getSliceTasks } from "./gsd-db.js";
|
||||
import { isValidationTerminal } from "./state.js";
|
||||
import {
|
||||
nativeConflictFiles,
|
||||
|
|
@ -366,13 +367,27 @@ export function verifyExpectedArtifact(
|
|||
const sid = parts[1];
|
||||
if (mid && sid) {
|
||||
try {
|
||||
const planContent = readFileSync(absPath, "utf-8");
|
||||
const plan = parsePlan(planContent);
|
||||
const tasksDir = resolveTasksDir(base, mid, sid);
|
||||
if (plan.tasks.length > 0 && tasksDir) {
|
||||
for (const task of plan.tasks) {
|
||||
const taskPlanFile = join(tasksDir, `${task.id}-PLAN.md`);
|
||||
if (!existsSync(taskPlanFile)) return false;
|
||||
// DB primary path — get task IDs to verify task plan files exist
|
||||
let taskIds: string[] | null = null;
|
||||
if (isDbAvailable()) {
|
||||
const tasks = getSliceTasks(mid, sid);
|
||||
if (tasks.length > 0) taskIds = tasks.map(t => t.id);
|
||||
}
|
||||
|
||||
if (!taskIds) {
|
||||
// DB unavailable or no tasks in DB — parse plan file for task IDs
|
||||
const planContent = readFileSync(absPath, "utf-8");
|
||||
const plan = parseLegacyPlan(planContent);
|
||||
if (plan.tasks.length > 0) taskIds = plan.tasks.map((t: { id: string }) => t.id);
|
||||
}
|
||||
|
||||
if (taskIds && taskIds.length > 0) {
|
||||
const tasksDir = resolveTasksDir(base, mid, sid);
|
||||
if (tasksDir) {
|
||||
for (const tid of taskIds) {
|
||||
const taskPlanFile = join(tasksDir, `${tid}-PLAN.md`);
|
||||
if (!existsSync(taskPlanFile)) return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
|
|
@ -399,12 +414,12 @@ export function verifyExpectedArtifact(
|
|||
// DB available — trust it
|
||||
if (dbSlice.status !== "complete") return false;
|
||||
} else if (!isDbAvailable()) {
|
||||
// DB unavailable — fall back to roadmap checkbox check
|
||||
// DB unavailable — fall back to roadmap checkbox check via parsers-legacy
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
if (roadmapFile && existsSync(roadmapFile)) {
|
||||
try {
|
||||
const roadmapContent = readFileSync(roadmapFile, "utf-8");
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const roadmap = parseLegacyRoadmap(roadmapContent);
|
||||
const slice = roadmap.slices.find((s) => s.id === sid);
|
||||
if (slice && !slice.done) return false;
|
||||
} catch {
|
||||
|
|
|
|||
|
|
@ -11,8 +11,8 @@
|
|||
*/
|
||||
|
||||
import type { ExtensionContext, ExtensionAPI } from "@gsd/pi-coding-agent";
|
||||
import { loadFile, parsePlan } from "./files.js";
|
||||
import { resolveSliceFile, resolveSlicePath } from "./paths.js";
|
||||
import { isDbAvailable, getTask } from "./gsd-db.js";
|
||||
import { loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
import {
|
||||
runVerificationGate,
|
||||
|
|
@ -64,15 +64,10 @@ export async function runPostUnitVerification(
|
|||
let taskPlanVerify: string | undefined;
|
||||
if (parts.length >= 3) {
|
||||
const [mid, sid, tid] = parts;
|
||||
const planFile = resolveSliceFile(s.basePath, mid, sid, "PLAN");
|
||||
if (planFile) {
|
||||
const planContent = await loadFile(planFile);
|
||||
if (planContent) {
|
||||
const slicePlan = parsePlan(planContent);
|
||||
const taskEntry = slicePlan?.tasks?.find((t) => t.id === tid);
|
||||
taskPlanVerify = taskEntry?.verify;
|
||||
}
|
||||
if (isDbAvailable()) {
|
||||
taskPlanVerify = getTask(mid, sid, tid)?.verify;
|
||||
}
|
||||
// When DB unavailable, taskPlanVerify stays undefined — gate runs without task-specific checks
|
||||
}
|
||||
|
||||
const result = runVerificationGate({
|
||||
|
|
|
|||
|
|
@ -22,6 +22,8 @@ import { GSDError, GSD_IO_ERROR, GSD_GIT_ERROR } from "./errors.js";
|
|||
import {
|
||||
reconcileWorktreeDb,
|
||||
isDbAvailable,
|
||||
getMilestone,
|
||||
getMilestoneSlices,
|
||||
} from "./gsd-db.js";
|
||||
import { atomicWriteSync } from "./atomic-write.js";
|
||||
import { execFileSync } from "node:child_process";
|
||||
|
|
@ -40,7 +42,6 @@ import {
|
|||
} from "./worktree.js";
|
||||
import { MergeConflictError, readIntegrationBranch, RUNTIME_EXCLUSION_PATHS } from "./git-service.js";
|
||||
import { debugLog } from "./debug-logger.js";
|
||||
import { parseRoadmap } from "./files.js";
|
||||
import { loadEffectiveGSDPreferences } from "./preferences.js";
|
||||
import {
|
||||
nativeGetCurrentBranch,
|
||||
|
|
@ -998,9 +999,14 @@ export function mergeMilestoneToMain(
|
|||
}
|
||||
}
|
||||
|
||||
// 2. Parse roadmap for slice listing
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const completedSlices = roadmap.slices.filter((s) => s.done);
|
||||
// 2. Get completed slices for commit message
|
||||
let completedSlices: { id: string; title: string }[] = [];
|
||||
if (isDbAvailable()) {
|
||||
completedSlices = getMilestoneSlices(milestoneId)
|
||||
.filter(s => s.status === "complete")
|
||||
.map(s => ({ id: s.id, title: s.title }));
|
||||
}
|
||||
// When DB unavailable, completedSlices stays empty — commit message will omit slice details
|
||||
|
||||
// 3. chdir to original base
|
||||
const previousCwd = process.cwd();
|
||||
|
|
@ -1030,8 +1036,9 @@ export function mergeMilestoneToMain(
|
|||
}
|
||||
|
||||
// 6. Build rich commit message
|
||||
const dbMilestone = getMilestone(milestoneId);
|
||||
const milestoneTitle =
|
||||
roadmap.title.replace(/^M\d+:\s*/, "").trim() || milestoneId;
|
||||
(dbMilestone?.title ?? "").replace(/^M\d+:\s*/, "").trim() || milestoneId;
|
||||
const subject = `feat(${milestoneId}): ${milestoneTitle}`;
|
||||
let body = "";
|
||||
if (completedSlices.length > 0) {
|
||||
|
|
|
|||
|
|
@ -79,10 +79,6 @@ import {
|
|||
getOldestInFlightToolStart,
|
||||
clearInFlightTools,
|
||||
} from "./auto-tool-tracking.js";
|
||||
import {
|
||||
collectObservabilityWarnings as _collectObservabilityWarnings,
|
||||
buildObservabilityRepairBlock,
|
||||
} from "./auto-observability.js";
|
||||
import { closeoutUnit } from "./auto-unit-closeout.js";
|
||||
import { recoverTimedOutUnit } from "./auto-timeout-recovery.js";
|
||||
import { selfHealRuntimeRecords } from "./auto-recovery.js";
|
||||
|
|
@ -961,9 +957,6 @@ function buildLoopDeps(): LoopDeps {
|
|||
runPreDispatchHooks,
|
||||
getPriorSliceCompletionBlocker,
|
||||
getMainBranch,
|
||||
collectObservabilityWarnings: _collectObservabilityWarnings,
|
||||
buildObservabilityRepairBlock,
|
||||
|
||||
// Unit closeout + runtime records
|
||||
closeoutUnit,
|
||||
verifyExpectedArtifact,
|
||||
|
|
|
|||
|
|
@ -170,14 +170,6 @@ export interface LoopDeps {
|
|||
unitId: string,
|
||||
) => string | null;
|
||||
getMainBranch: (basePath: string) => string;
|
||||
collectObservabilityWarnings: (
|
||||
ctx: ExtensionContext,
|
||||
basePath: string,
|
||||
unitType: string,
|
||||
unitId: string,
|
||||
) => Promise<unknown[]>;
|
||||
buildObservabilityRepairBlock: (issues: unknown[]) => string | null;
|
||||
|
||||
// Unit closeout + runtime records
|
||||
closeoutUnit: (
|
||||
ctx: ExtensionContext,
|
||||
|
|
|
|||
|
|
@ -161,7 +161,6 @@ export async function autoLoop(
|
|||
prompt: step.prompt,
|
||||
finalPrompt: step.prompt,
|
||||
pauseAfterUatDispatch: false,
|
||||
observabilityIssues: [],
|
||||
state: gsdState,
|
||||
mid: s.currentMilestoneId ?? "workflow",
|
||||
midTitle: "Workflow",
|
||||
|
|
@ -234,7 +233,6 @@ export async function autoLoop(
|
|||
prompt: sidecarItem.prompt,
|
||||
finalPrompt: sidecarItem.prompt,
|
||||
pauseAfterUatDispatch: false,
|
||||
observabilityIssues: [],
|
||||
state: sidecarState,
|
||||
mid: sidecarState.activeMilestone?.id,
|
||||
midTitle: sidecarState.activeMilestone?.title,
|
||||
|
|
|
|||
|
|
@ -635,18 +635,11 @@ export async function runDispatch(
|
|||
return { action: "break", reason: "prior-slice-blocker" };
|
||||
}
|
||||
|
||||
const observabilityIssues = await deps.collectObservabilityWarnings(
|
||||
ctx,
|
||||
s.basePath,
|
||||
unitType,
|
||||
unitId,
|
||||
);
|
||||
|
||||
return {
|
||||
action: "next",
|
||||
data: {
|
||||
unitType, unitId, prompt, finalPrompt: prompt,
|
||||
pauseAfterUatDispatch, observabilityIssues,
|
||||
pauseAfterUatDispatch,
|
||||
state, mid, midTitle,
|
||||
isRetry: false, previousTier: undefined,
|
||||
hookModelOverride: preDispatchResult.model,
|
||||
|
|
@ -807,7 +800,7 @@ export async function runUnitPhase(
|
|||
sidecarItem?: SidecarItem,
|
||||
): Promise<PhaseResult<{ unitStartedAt: number }>> {
|
||||
const { ctx, pi, s, deps, prefs } = ic;
|
||||
const { unitType, unitId, prompt, observabilityIssues, state, mid } = iterData;
|
||||
const { unitType, unitId, prompt, state, mid } = iterData;
|
||||
|
||||
debugLog("autoLoop", {
|
||||
phase: "unit-execution",
|
||||
|
|
@ -835,11 +828,11 @@ export async function runUnitPhase(
|
|||
const hasProjectFile = PROJECT_FILES.some((f) => deps.existsSync(join(s.basePath, f)));
|
||||
const hasSrcDir = deps.existsSync(join(s.basePath, "src"));
|
||||
if (!hasProjectFile && !hasSrcDir) {
|
||||
const msg = `Worktree health check failed: ${s.basePath} has no recognized project files — refusing to dispatch ${unitType} ${unitId}`;
|
||||
debugLog("runUnitPhase", { phase: "worktree-health-fail", basePath: s.basePath, hasProjectFile, hasSrcDir });
|
||||
ctx.ui.notify(msg, "error");
|
||||
await deps.stopAuto(ctx, pi, msg);
|
||||
return { action: "break", reason: "worktree-invalid" };
|
||||
// Greenfield projects won't have project files yet — the first task creates them.
|
||||
// Log a warning but allow execution to proceed. The .git check above is sufficient
|
||||
// to ensure we're in a valid working directory.
|
||||
debugLog("runUnitPhase", { phase: "worktree-health-warn-greenfield", basePath: s.basePath, hasProjectFile, hasSrcDir });
|
||||
ctx.ui.notify(`Warning: ${s.basePath} has no recognized project files — proceeding as greenfield project`, "warning");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -912,12 +905,6 @@ export async function runUnitPhase(
|
|||
}
|
||||
}
|
||||
|
||||
const repairBlock =
|
||||
deps.buildObservabilityRepairBlock(observabilityIssues);
|
||||
if (repairBlock) {
|
||||
finalPrompt = `${finalPrompt}${repairBlock}`;
|
||||
}
|
||||
|
||||
// Prompt char measurement
|
||||
s.lastPromptCharCount = finalPrompt.length;
|
||||
s.lastBaselineCharCount = undefined;
|
||||
|
|
|
|||
|
|
@ -92,7 +92,6 @@ export interface IterationData {
|
|||
prompt: string;
|
||||
finalPrompt: string;
|
||||
pauseAfterUatDispatch: boolean;
|
||||
observabilityIssues: unknown[];
|
||||
state: GSDState;
|
||||
mid: string | undefined;
|
||||
midTitle: string | undefined;
|
||||
|
|
|
|||
|
|
@ -4,11 +4,13 @@ import type { ExtensionAPI } from "@gsd/pi-coding-agent";
|
|||
import { findMilestoneIds, nextMilestoneId, claimReservedId, getReservedMilestoneIds } from "../guided-flow.js";
|
||||
import { loadEffectiveGSDPreferences } from "../preferences.js";
|
||||
import { ensureDbOpen } from "./dynamic-tools.js";
|
||||
import { StringEnum } from "@gsd/pi-ai";
|
||||
|
||||
/**
|
||||
* Register an alias tool that shares the same execute function as its canonical counterpart.
|
||||
* The alias description and promptGuidelines direct the LLM to prefer the canonical name.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- toolDef shape matches ToolDefinition but typing it fully requires generics
|
||||
function registerAlias(pi: ExtensionAPI, toolDef: any, aliasName: string, canonicalName: string): void {
|
||||
pi.registerTool({
|
||||
...toolDef,
|
||||
|
|
@ -21,7 +23,7 @@ function registerAlias(pi: ExtensionAPI, toolDef: any, aliasName: string, canoni
|
|||
export function registerDbTools(pi: ExtensionAPI): void {
|
||||
// ─── gsd_decision_save (formerly gsd_save_decision) ─────────────────────
|
||||
|
||||
const decisionSaveExecute = async (_toolCallId: any, params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const decisionSaveExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
|
|
@ -92,7 +94,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
|
||||
// ─── gsd_requirement_update (formerly gsd_update_requirement) ───────────
|
||||
|
||||
const requirementUpdateExecute = async (_toolCallId: any, params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const requirementUpdateExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
|
|
@ -162,7 +164,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
|
||||
// ─── gsd_summary_save (formerly gsd_save_summary) ──────────────────────
|
||||
|
||||
const summarySaveExecute = async (_toolCallId: any, params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const summarySaveExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
|
|
@ -240,7 +242,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
|
||||
// ─── gsd_milestone_generate_id (formerly gsd_generate_milestone_id) ────
|
||||
|
||||
const milestoneGenerateIdExecute = async (_toolCallId: any, _params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const milestoneGenerateIdExecute = async (_toolCallId: string, _params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
try {
|
||||
// Claim a reserved ID if the guided-flow already previewed one to the user.
|
||||
// This guarantees the ID shown in the UI matches the one materialised on disk.
|
||||
|
|
@ -291,9 +293,247 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
pi.registerTool(milestoneGenerateIdTool);
|
||||
registerAlias(pi, milestoneGenerateIdTool, "gsd_generate_milestone_id", "gsd_milestone_generate_id");
|
||||
|
||||
// ─── gsd_plan_milestone (gsd_milestone_plan alias) ─────────────────────
|
||||
|
||||
const planMilestoneExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: "Error: GSD database is not available. Cannot plan milestone." }],
|
||||
details: { operation: "plan_milestone", error: "db_unavailable" } as any,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { handlePlanMilestone } = await import("../tools/plan-milestone.js");
|
||||
const result = await handlePlanMilestone(params, process.cwd());
|
||||
if ("error" in result) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning milestone: ${result.error}` }],
|
||||
details: { operation: "plan_milestone", error: result.error } as any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Planned milestone ${result.milestoneId}` }],
|
||||
details: {
|
||||
operation: "plan_milestone",
|
||||
milestoneId: result.milestoneId,
|
||||
roadmapPath: result.roadmapPath,
|
||||
} as any,
|
||||
};
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
process.stderr.write(`gsd-db: plan_milestone tool failed: ${msg}\n`);
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning milestone: ${msg}` }],
|
||||
details: { operation: "plan_milestone", error: msg } as any,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const planMilestoneTool = {
|
||||
name: "gsd_plan_milestone",
|
||||
label: "Plan Milestone",
|
||||
description:
|
||||
"Write milestone planning state to the GSD database, render ROADMAP.md from DB, and clear caches after a successful render.",
|
||||
promptSnippet: "Plan a milestone via DB write + roadmap render + cache invalidation",
|
||||
promptGuidelines: [
|
||||
"Use gsd_plan_milestone for milestone planning instead of writing ROADMAP.md directly.",
|
||||
"Keep parameters flat and provide the full milestone planning payload, including slices.",
|
||||
"The tool validates input, writes milestone and slice planning data transactionally, renders ROADMAP.md from DB, and clears both state and parse caches after success.",
|
||||
"Use the canonical name gsd_plan_milestone; gsd_milestone_plan is only an alias.",
|
||||
],
|
||||
parameters: Type.Object({
|
||||
milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }),
|
||||
title: Type.String({ description: "Milestone title" }),
|
||||
status: Type.Optional(Type.String({ description: "Milestone status (defaults to active)" })),
|
||||
dependsOn: Type.Optional(Type.Array(Type.String(), { description: "Milestone dependencies" })),
|
||||
vision: Type.String({ description: "Milestone vision" }),
|
||||
successCriteria: Type.Array(Type.String(), { description: "Top-level success criteria bullets" }),
|
||||
keyRisks: Type.Array(Type.Object({
|
||||
risk: Type.String({ description: "Risk statement" }),
|
||||
whyItMatters: Type.String({ description: "Why the risk matters" }),
|
||||
}), { description: "Structured risk entries" }),
|
||||
proofStrategy: Type.Array(Type.Object({
|
||||
riskOrUnknown: Type.String({ description: "Risk or unknown to retire" }),
|
||||
retireIn: Type.String({ description: "Where it will be retired" }),
|
||||
whatWillBeProven: Type.String({ description: "What proof will be produced" }),
|
||||
}), { description: "Structured proof strategy entries" }),
|
||||
verificationContract: Type.String({ description: "Verification contract text" }),
|
||||
verificationIntegration: Type.String({ description: "Integration verification text" }),
|
||||
verificationOperational: Type.String({ description: "Operational verification text" }),
|
||||
verificationUat: Type.String({ description: "UAT verification text" }),
|
||||
definitionOfDone: Type.Array(Type.String(), { description: "Definition of done bullets" }),
|
||||
requirementCoverage: Type.String({ description: "Requirement coverage text" }),
|
||||
boundaryMapMarkdown: Type.String({ description: "Boundary map markdown block" }),
|
||||
slices: Type.Array(Type.Object({
|
||||
sliceId: Type.String({ description: "Slice ID (e.g. S01)" }),
|
||||
title: Type.String({ description: "Slice title" }),
|
||||
risk: Type.String({ description: "Slice risk" }),
|
||||
depends: Type.Array(Type.String(), { description: "Slice dependency IDs" }),
|
||||
demo: Type.String({ description: "Roadmap demo text / After this" }),
|
||||
goal: Type.String({ description: "Slice goal" }),
|
||||
successCriteria: Type.String({ description: "Slice success criteria block" }),
|
||||
proofLevel: Type.String({ description: "Slice proof level" }),
|
||||
integrationClosure: Type.String({ description: "Slice integration closure" }),
|
||||
observabilityImpact: Type.String({ description: "Slice observability impact" }),
|
||||
}), { description: "Planned slices for the milestone" }),
|
||||
}),
|
||||
execute: planMilestoneExecute,
|
||||
};
|
||||
|
||||
pi.registerTool(planMilestoneTool);
|
||||
registerAlias(pi, planMilestoneTool, "gsd_milestone_plan", "gsd_plan_milestone");
|
||||
|
||||
// ─── gsd_plan_slice (gsd_slice_plan alias) ─────────────────────────────
|
||||
|
||||
const planSliceExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: "Error: GSD database is not available. Cannot plan slice." }],
|
||||
details: { operation: "plan_slice", error: "db_unavailable" } as any,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { handlePlanSlice } = await import("../tools/plan-slice.js");
|
||||
const result = await handlePlanSlice(params, process.cwd());
|
||||
if ("error" in result) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning slice: ${result.error}` }],
|
||||
details: { operation: "plan_slice", error: result.error } as any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Planned slice ${result.sliceId} (${result.milestoneId})` }],
|
||||
details: {
|
||||
operation: "plan_slice",
|
||||
milestoneId: result.milestoneId,
|
||||
sliceId: result.sliceId,
|
||||
planPath: result.planPath,
|
||||
taskPlanPaths: result.taskPlanPaths,
|
||||
} as any,
|
||||
};
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
process.stderr.write(`gsd-db: plan_slice tool failed: ${msg}\n`);
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning slice: ${msg}` }],
|
||||
details: { operation: "plan_slice", error: msg } as any,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const planSliceTool = {
|
||||
name: "gsd_plan_slice",
|
||||
label: "Plan Slice",
|
||||
description:
|
||||
"Write slice planning state to the GSD database, render S##-PLAN.md plus task PLAN artifacts from DB, and clear caches after a successful render.",
|
||||
promptSnippet: "Plan a slice via DB write + PLAN render + cache invalidation",
|
||||
promptGuidelines: [
|
||||
"Use gsd_plan_slice for slice planning instead of writing S##-PLAN.md or task PLAN files directly.",
|
||||
"Keep parameters flat and provide the full slice planning payload, including tasks.",
|
||||
"The tool validates input, requires an existing parent slice, writes slice/task planning data, renders PLAN.md and task plan files from DB, and clears both state and parse caches after success.",
|
||||
"Use the canonical name gsd_plan_slice; gsd_slice_plan is only an alias.",
|
||||
],
|
||||
parameters: Type.Object({
|
||||
milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }),
|
||||
sliceId: Type.String({ description: "Slice ID (e.g. S01)" }),
|
||||
goal: Type.String({ description: "Slice goal" }),
|
||||
successCriteria: Type.String({ description: "Slice success criteria block" }),
|
||||
proofLevel: Type.String({ description: "Slice proof level" }),
|
||||
integrationClosure: Type.String({ description: "Slice integration closure" }),
|
||||
observabilityImpact: Type.String({ description: "Slice observability impact" }),
|
||||
tasks: Type.Array(Type.Object({
|
||||
taskId: Type.String({ description: "Task ID (e.g. T01)" }),
|
||||
title: Type.String({ description: "Task title" }),
|
||||
description: Type.String({ description: "Task description / steps block" }),
|
||||
estimate: Type.String({ description: "Task estimate string" }),
|
||||
files: Type.Array(Type.String(), { description: "Files likely touched" }),
|
||||
verify: Type.String({ description: "Verification command or block" }),
|
||||
inputs: Type.Array(Type.String(), { description: "Input files or references" }),
|
||||
expectedOutput: Type.Array(Type.String(), { description: "Expected output files or artifacts" }),
|
||||
observabilityImpact: Type.Optional(Type.String({ description: "Task observability impact" })),
|
||||
}), { description: "Planned tasks for the slice" }),
|
||||
}),
|
||||
execute: planSliceExecute,
|
||||
};
|
||||
|
||||
pi.registerTool(planSliceTool);
|
||||
registerAlias(pi, planSliceTool, "gsd_slice_plan", "gsd_plan_slice");
|
||||
|
||||
// ─── gsd_plan_task (gsd_task_plan alias) ───────────────────────────────
|
||||
|
||||
const planTaskExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: "Error: GSD database is not available. Cannot plan task." }],
|
||||
details: { operation: "plan_task", error: "db_unavailable" } as any,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { handlePlanTask } = await import("../tools/plan-task.js");
|
||||
const result = await handlePlanTask(params, process.cwd());
|
||||
if ("error" in result) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning task: ${result.error}` }],
|
||||
details: { operation: "plan_task", error: result.error } as any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Planned task ${result.taskId} (${result.sliceId}/${result.milestoneId})` }],
|
||||
details: {
|
||||
operation: "plan_task",
|
||||
milestoneId: result.milestoneId,
|
||||
sliceId: result.sliceId,
|
||||
taskId: result.taskId,
|
||||
taskPlanPath: result.taskPlanPath,
|
||||
} as any,
|
||||
};
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
process.stderr.write(`gsd-db: plan_task tool failed: ${msg}\n`);
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error planning task: ${msg}` }],
|
||||
details: { operation: "plan_task", error: msg } as any,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const planTaskTool = {
|
||||
name: "gsd_plan_task",
|
||||
label: "Plan Task",
|
||||
description:
|
||||
"Write task planning state to the GSD database, render tasks/T##-PLAN.md from DB, and clear caches after a successful render.",
|
||||
promptSnippet: "Plan a task via DB write + task PLAN render + cache invalidation",
|
||||
promptGuidelines: [
|
||||
"Use gsd_plan_task for task planning instead of writing tasks/T##-PLAN.md directly.",
|
||||
"Keep parameters flat and provide the full task planning payload.",
|
||||
"The tool validates input, requires an existing parent slice, writes task planning data, renders the task PLAN file from DB, and clears both state and parse caches after success.",
|
||||
"Use the canonical name gsd_plan_task; gsd_task_plan is only an alias.",
|
||||
],
|
||||
parameters: Type.Object({
|
||||
milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }),
|
||||
sliceId: Type.String({ description: "Slice ID (e.g. S01)" }),
|
||||
taskId: Type.String({ description: "Task ID (e.g. T01)" }),
|
||||
title: Type.String({ description: "Task title" }),
|
||||
description: Type.String({ description: "Task description / steps block" }),
|
||||
estimate: Type.String({ description: "Task estimate string" }),
|
||||
files: Type.Array(Type.String(), { description: "Files likely touched" }),
|
||||
verify: Type.String({ description: "Verification command or block" }),
|
||||
inputs: Type.Array(Type.String(), { description: "Input files or references" }),
|
||||
expectedOutput: Type.Array(Type.String(), { description: "Expected output files or artifacts" }),
|
||||
observabilityImpact: Type.Optional(Type.String({ description: "Task observability impact" })),
|
||||
}),
|
||||
execute: planTaskExecute,
|
||||
};
|
||||
|
||||
pi.registerTool(planTaskTool);
|
||||
registerAlias(pi, planTaskTool, "gsd_task_plan", "gsd_plan_task");
|
||||
|
||||
// ─── gsd_task_complete (gsd_complete_task alias) ────────────────────────
|
||||
|
||||
const taskCompleteExecute = async (_toolCallId: any, params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const taskCompleteExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
|
|
@ -374,7 +614,7 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
|
||||
// ─── gsd_slice_complete (gsd_complete_slice alias) ─────────────────────
|
||||
|
||||
const sliceCompleteExecute = async (_toolCallId: any, params: any, _signal: any, _onUpdate: any, _ctx: any) => {
|
||||
const sliceCompleteExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
|
|
@ -484,4 +724,172 @@ export function registerDbTools(pi: ExtensionAPI): void {
|
|||
|
||||
pi.registerTool(sliceCompleteTool);
|
||||
registerAlias(pi, sliceCompleteTool, "gsd_complete_slice", "gsd_slice_complete");
|
||||
|
||||
// ─── gsd_replan_slice (gsd_slice_replan alias) ─────────────────────────
|
||||
|
||||
const replanSliceExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: "Error: GSD database is not available. Cannot replan slice." }],
|
||||
details: { operation: "replan_slice", error: "db_unavailable" } as any,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { handleReplanSlice } = await import("../tools/replan-slice.js");
|
||||
const result = await handleReplanSlice(params, process.cwd());
|
||||
if ("error" in result) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error replanning slice: ${result.error}` }],
|
||||
details: { operation: "replan_slice", error: result.error } as any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Replanned slice ${result.sliceId} (${result.milestoneId})` }],
|
||||
details: {
|
||||
operation: "replan_slice",
|
||||
milestoneId: result.milestoneId,
|
||||
sliceId: result.sliceId,
|
||||
replanPath: result.replanPath,
|
||||
planPath: result.planPath,
|
||||
} as any,
|
||||
};
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
process.stderr.write(`gsd-db: replan_slice tool failed: ${msg}\n`);
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error replanning slice: ${msg}` }],
|
||||
details: { operation: "replan_slice", error: msg } as any,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const replanSliceTool = {
|
||||
name: "gsd_replan_slice",
|
||||
label: "Replan Slice",
|
||||
description:
|
||||
"Replan a slice after a blocker is discovered. Structurally enforces preservation of completed tasks — " +
|
||||
"mutations to completed task IDs are rejected with actionable error payloads. Writes replan history to DB, " +
|
||||
"applies task mutations, re-renders PLAN.md, and renders REPLAN.md.",
|
||||
promptSnippet: "Replan a GSD slice with structural enforcement of completed tasks",
|
||||
promptGuidelines: [
|
||||
"Use gsd_replan_slice (canonical) or gsd_slice_replan (alias) when a blocker is discovered and the slice plan needs rewriting.",
|
||||
"The tool structurally enforces that completed tasks cannot be updated or removed — violations return specific error payloads naming the blocked task ID.",
|
||||
"Parameters: milestoneId, sliceId, blockerTaskId, blockerDescription, whatChanged, updatedTasks (array), removedTaskIds (array).",
|
||||
"updatedTasks items: taskId, title, description, estimate, files, verify, inputs, expectedOutput.",
|
||||
],
|
||||
parameters: Type.Object({
|
||||
milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }),
|
||||
sliceId: Type.String({ description: "Slice ID (e.g. S01)" }),
|
||||
blockerTaskId: Type.String({ description: "Task ID that discovered the blocker" }),
|
||||
blockerDescription: Type.String({ description: "Description of the blocker" }),
|
||||
whatChanged: Type.String({ description: "Summary of what changed in the plan" }),
|
||||
updatedTasks: Type.Array(
|
||||
Type.Object({
|
||||
taskId: Type.String({ description: "Task ID (e.g. T01)" }),
|
||||
title: Type.String({ description: "Task title" }),
|
||||
description: Type.String({ description: "Task description / steps block" }),
|
||||
estimate: Type.String({ description: "Task estimate string" }),
|
||||
files: Type.Array(Type.String(), { description: "Files likely touched" }),
|
||||
verify: Type.String({ description: "Verification command or block" }),
|
||||
inputs: Type.Array(Type.String(), { description: "Input files or references" }),
|
||||
expectedOutput: Type.Array(Type.String(), { description: "Expected output files or artifacts" }),
|
||||
}),
|
||||
{ description: "Tasks to upsert (update existing or insert new)" },
|
||||
),
|
||||
removedTaskIds: Type.Array(Type.String(), { description: "Task IDs to remove from the slice" }),
|
||||
}),
|
||||
execute: replanSliceExecute,
|
||||
};
|
||||
|
||||
pi.registerTool(replanSliceTool);
|
||||
registerAlias(pi, replanSliceTool, "gsd_slice_replan", "gsd_replan_slice");
|
||||
|
||||
// ─── gsd_reassess_roadmap (gsd_roadmap_reassess alias) ─────────────────
|
||||
|
||||
const reassessRoadmapExecute = async (_toolCallId: string, params: any, _signal: AbortSignal | undefined, _onUpdate: unknown, _ctx: unknown) => {
|
||||
const dbAvailable = await ensureDbOpen();
|
||||
if (!dbAvailable) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: "Error: GSD database is not available. Cannot reassess roadmap." }],
|
||||
details: { operation: "reassess_roadmap", error: "db_unavailable" } as any,
|
||||
};
|
||||
}
|
||||
try {
|
||||
const { handleReassessRoadmap } = await import("../tools/reassess-roadmap.js");
|
||||
const result = await handleReassessRoadmap(params, process.cwd());
|
||||
if ("error" in result) {
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error reassessing roadmap: ${result.error}` }],
|
||||
details: { operation: "reassess_roadmap", error: result.error } as any,
|
||||
};
|
||||
}
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Reassessed roadmap for milestone ${result.milestoneId} after ${result.completedSliceId}` }],
|
||||
details: {
|
||||
operation: "reassess_roadmap",
|
||||
milestoneId: result.milestoneId,
|
||||
completedSliceId: result.completedSliceId,
|
||||
assessmentPath: result.assessmentPath,
|
||||
roadmapPath: result.roadmapPath,
|
||||
} as any,
|
||||
};
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
process.stderr.write(`gsd-db: reassess_roadmap tool failed: ${msg}\n`);
|
||||
return {
|
||||
content: [{ type: "text" as const, text: `Error reassessing roadmap: ${msg}` }],
|
||||
details: { operation: "reassess_roadmap", error: msg } as any,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const reassessRoadmapTool = {
|
||||
name: "gsd_reassess_roadmap",
|
||||
label: "Reassess Roadmap",
|
||||
description:
|
||||
"Reassess the milestone roadmap after a slice completes. Structurally enforces preservation of completed slices — " +
|
||||
"mutations to completed slice IDs are rejected with actionable error payloads. Writes assessment to DB, " +
|
||||
"applies slice mutations, re-renders ROADMAP.md, and renders ASSESSMENT.md.",
|
||||
promptSnippet: "Reassess a GSD roadmap with structural enforcement of completed slices",
|
||||
promptGuidelines: [
|
||||
"Use gsd_reassess_roadmap (canonical) or gsd_roadmap_reassess (alias) after a slice completes to reassess the roadmap.",
|
||||
"The tool structurally enforces that completed slices cannot be modified or removed — violations return specific error payloads naming the blocked slice ID.",
|
||||
"Parameters: milestoneId, completedSliceId, verdict, assessment, sliceChanges (object with modified, added, removed arrays).",
|
||||
"sliceChanges.modified items: sliceId, title, risk (optional), depends (optional), demo (optional).",
|
||||
],
|
||||
parameters: Type.Object({
|
||||
milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }),
|
||||
completedSliceId: Type.String({ description: "Slice ID that just completed" }),
|
||||
verdict: Type.String({ description: "Assessment verdict (e.g. 'roadmap-confirmed', 'roadmap-adjusted')" }),
|
||||
assessment: Type.String({ description: "Assessment text explaining the decision" }),
|
||||
sliceChanges: Type.Object({
|
||||
modified: Type.Array(
|
||||
Type.Object({
|
||||
sliceId: Type.String({ description: "Slice ID to modify" }),
|
||||
title: Type.String({ description: "Updated slice title" }),
|
||||
risk: Type.Optional(Type.String({ description: "Updated risk level" })),
|
||||
depends: Type.Optional(Type.Array(Type.String(), { description: "Updated dependencies" })),
|
||||
demo: Type.Optional(Type.String({ description: "Updated demo text" })),
|
||||
}),
|
||||
{ description: "Slices to modify" },
|
||||
),
|
||||
added: Type.Array(
|
||||
Type.Object({
|
||||
sliceId: Type.String({ description: "New slice ID" }),
|
||||
title: Type.String({ description: "New slice title" }),
|
||||
risk: Type.Optional(Type.String({ description: "Risk level" })),
|
||||
depends: Type.Optional(Type.Array(Type.String(), { description: "Dependencies" })),
|
||||
demo: Type.Optional(Type.String({ description: "Demo text" })),
|
||||
}),
|
||||
{ description: "New slices to add" },
|
||||
),
|
||||
removed: Type.Array(Type.String(), { description: "Slice IDs to remove" }),
|
||||
}, { description: "Slice changes to apply" }),
|
||||
}),
|
||||
execute: reassessRoadmapExecute,
|
||||
};
|
||||
|
||||
pi.registerTool(reassessRoadmapTool);
|
||||
registerAlias(pi, reassessRoadmapTool, "gsd_roadmap_reassess", "gsd_reassess_roadmap");
|
||||
}
|
||||
|
|
|
|||
|
|
@ -44,7 +44,8 @@ export async function handleCleanupBranches(ctx: ExtensionCommandContext, basePa
|
|||
try {
|
||||
const { listWorktrees } = await import("./worktree-manager.js");
|
||||
const { resolveMilestoneFile } = await import("./paths.js");
|
||||
const { loadFile, parseRoadmap } = await import("./files.js");
|
||||
const { loadFile } = await import("./files.js");
|
||||
const { parseRoadmap } = await import("./parsers-legacy.js");
|
||||
const { isMilestoneComplete } = await import("./state.js");
|
||||
|
||||
const attachedBranches = new Set(
|
||||
|
|
|
|||
|
|
@ -9,7 +9,8 @@
|
|||
import type { Theme } from "@gsd/pi-coding-agent";
|
||||
import { truncateToWidth, visibleWidth, matchesKey, Key } from "@gsd/pi-tui";
|
||||
import { deriveState } from "./state.js";
|
||||
import { loadFile, parseRoadmap, parsePlan } from "./files.js";
|
||||
import { loadFile } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from "./gsd-db.js";
|
||||
import { resolveMilestoneFile, resolveSliceFile } from "./paths.js";
|
||||
import { getAutoDashboardData } from "./auto.js";
|
||||
import type { AutoDashboardData } from "./auto-dashboard.js";
|
||||
|
|
@ -159,9 +160,14 @@ export class GSDDashboardOverlay {
|
|||
|
||||
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
for (const s of roadmap.slices) {
|
||||
// Normalize slices from DB
|
||||
type NormSlice = { id: string; done: boolean; title: string; risk: string };
|
||||
let normSlices: NormSlice[] = [];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(mid).map(s => ({ id: s.id, done: s.status === "complete", title: s.title, risk: s.risk || "medium" }));
|
||||
}
|
||||
|
||||
for (const s of normSlices) {
|
||||
const sliceView: SliceView = {
|
||||
id: s.id,
|
||||
title: s.title,
|
||||
|
|
@ -172,19 +178,18 @@ export class GSDDashboardOverlay {
|
|||
};
|
||||
|
||||
if (sliceView.active) {
|
||||
const planFile = resolveSliceFile(base, mid, s.id, "PLAN");
|
||||
const planContent = planFile ? await loadFile(planFile) : null;
|
||||
if (planContent) {
|
||||
const plan = parsePlan(planContent);
|
||||
// Normalize tasks from DB
|
||||
if (isDbAvailable()) {
|
||||
const dbTasks = getSliceTasks(mid, s.id);
|
||||
sliceView.taskProgress = {
|
||||
done: plan.tasks.filter(t => t.done).length,
|
||||
total: plan.tasks.length,
|
||||
done: dbTasks.filter(t => t.status === "complete" || t.status === "done").length,
|
||||
total: dbTasks.length,
|
||||
};
|
||||
for (const t of plan.tasks) {
|
||||
for (const t of dbTasks) {
|
||||
sliceView.tasks.push({
|
||||
id: t.id,
|
||||
title: t.title,
|
||||
done: t.done,
|
||||
done: t.status === "complete" || t.status === "done",
|
||||
active: state.activeTask?.id === t.id,
|
||||
});
|
||||
}
|
||||
|
|
@ -192,7 +197,6 @@ export class GSDDashboardOverlay {
|
|||
}
|
||||
|
||||
view.slices.push(sliceView);
|
||||
}
|
||||
}
|
||||
|
||||
this.milestoneData = view;
|
||||
|
|
|
|||
|
|
@ -1,10 +1,8 @@
|
|||
// GSD Dispatch Guard — prevents out-of-order slice dispatch
|
||||
|
||||
import { readFileSync } from "node:fs";
|
||||
import { readdirSync } from "node:fs";
|
||||
import { resolveMilestoneFile, milestonesDir } from "./paths.js";
|
||||
import { parseRoadmapSlices } from "./roadmap-slices.js";
|
||||
import { resolveMilestoneFile } from "./paths.js";
|
||||
import { findMilestoneIds } from "./guided-flow.js";
|
||||
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
|
||||
|
||||
const SLICE_DISPATCH_TYPES = new Set([
|
||||
"research-slice",
|
||||
|
|
@ -14,28 +12,6 @@ const SLICE_DISPATCH_TYPES = new Set([
|
|||
"complete-slice",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Read a roadmap file from disk (working tree) rather than from a git branch.
|
||||
*
|
||||
* Prior implementation used `git show <branch>:<path>` which read committed
|
||||
* state on a specific branch. This caused false-positive blockers when work
|
||||
* was committed on a milestone/worktree branch but the integration branch
|
||||
* (main) hadn't been updated yet — the guard would see prior slices as
|
||||
* incomplete on main even though they were done in the working tree (#530).
|
||||
*
|
||||
* Reading from disk always reflects the latest state, regardless of which
|
||||
* branch is checked out or whether changes have been committed.
|
||||
*/
|
||||
function readRoadmapFromDisk(base: string, milestoneId: string): string | null {
|
||||
try {
|
||||
const absPath = resolveMilestoneFile(base, milestoneId, "ROADMAP");
|
||||
if (!absPath) return null;
|
||||
return readFileSync(absPath, "utf-8").trim();
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function getPriorSliceCompletionBlocker(
|
||||
base: string,
|
||||
_mainBranch: string,
|
||||
|
|
@ -58,11 +34,19 @@ export function getPriorSliceCompletionBlocker(
|
|||
if (resolveMilestoneFile(base, mid, "PARKED")) continue;
|
||||
if (resolveMilestoneFile(base, mid, "SUMMARY")) continue;
|
||||
|
||||
// Read from disk (working tree) — always has the latest state
|
||||
const roadmapContent = readRoadmapFromDisk(base, mid);
|
||||
if (!roadmapContent) continue;
|
||||
// Normalised slice list from DB
|
||||
type NormSlice = { id: string; done: boolean; depends: string[] };
|
||||
|
||||
if (!isDbAvailable()) continue;
|
||||
|
||||
const rows = getMilestoneSlices(mid);
|
||||
if (rows.length === 0) continue;
|
||||
const slices: NormSlice[] = rows.map((r) => ({
|
||||
id: r.id,
|
||||
done: r.status === "complete",
|
||||
depends: r.depends ?? [],
|
||||
}));
|
||||
|
||||
const slices = parseRoadmapSlices(roadmapContent);
|
||||
if (mid !== targetMid) {
|
||||
const incomplete = slices.find((slice) => !slice.done);
|
||||
if (incomplete) {
|
||||
|
|
|
|||
|
|
@ -3,7 +3,9 @@ import { basename, dirname, join, sep } from "node:path";
|
|||
|
||||
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
|
||||
import { readRepoMeta, externalProjectsRoot, cleanNumberedGsdVariants } from "./repo-identity.js";
|
||||
import { loadFile, parseRoadmap } from "./files.js";
|
||||
import { loadFile } from "./files.js";
|
||||
import { parseRoadmap as parseLegacyRoadmap } from "./parsers-legacy.js";
|
||||
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
|
||||
import { resolveMilestoneFile, milestonesDir, gsdRoot, resolveGsdRootFile, relGsdRootFile } from "./paths.js";
|
||||
import { deriveState, isMilestoneComplete } from "./state.js";
|
||||
import { saveFile } from "./files.js";
|
||||
|
|
@ -51,12 +53,18 @@ export async function checkGitHealth(
|
|||
// Check if milestone is complete via roadmap
|
||||
let isComplete = false;
|
||||
if (milestoneEntry) {
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
isComplete = isMilestoneComplete(roadmap);
|
||||
if (isDbAvailable()) {
|
||||
const dbSlices = getMilestoneSlices(milestoneId);
|
||||
isComplete = dbSlices.length > 0 && dbSlices.every(s => s.status === "complete");
|
||||
} else {
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseLegacyRoadmap(roadmapContent);
|
||||
isComplete = isMilestoneComplete(roadmap);
|
||||
}
|
||||
}
|
||||
// When DB unavailable and no roadmap, isComplete stays false
|
||||
}
|
||||
|
||||
if (isComplete) {
|
||||
|
|
@ -98,11 +106,17 @@ export async function checkGitHealth(
|
|||
|
||||
const milestoneId = branch.replace(/^milestone\//, "");
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (!roadmapContent) continue;
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
if (isMilestoneComplete(roadmap)) {
|
||||
let branchMilestoneComplete = false;
|
||||
if (isDbAvailable()) {
|
||||
const dbSlices = getMilestoneSlices(milestoneId);
|
||||
branchMilestoneComplete = dbSlices.length > 0 && dbSlices.every(s => s.status === "complete");
|
||||
} else {
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (!roadmapContent) continue;
|
||||
const roadmap = parseLegacyRoadmap(roadmapContent);
|
||||
branchMilestoneComplete = isMilestoneComplete(roadmap);
|
||||
}
|
||||
if (branchMilestoneComplete) {
|
||||
issues.push({
|
||||
severity: "info",
|
||||
code: "stale_milestone_branch",
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
import { existsSync, mkdirSync, lstatSync, readdirSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { loadFile, parsePlan, parseRoadmap, parseSummary, saveFile, parseTaskPlanMustHaves, countMustHavesMentionedInSummary } from "./files.js";
|
||||
import { loadFile, parseSummary, saveFile, parseTaskPlanMustHaves, countMustHavesMentionedInSummary } from "./files.js";
|
||||
import { parseRoadmap as parseLegacyRoadmap, parsePlan as parseLegacyPlan } from "./parsers-legacy.js";
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from "./gsd-db.js";
|
||||
import { resolveMilestoneFile, resolveMilestonePath, resolveSliceFile, resolveSlicePath, resolveTaskFile, resolveTasksDir, milestonesDir, gsdRoot, relMilestoneFile, relSliceFile, relTaskFile, relSlicePath, relGsdRootFile, resolveGsdRootFile, relMilestonePath } from "./paths.js";
|
||||
import { deriveState, isMilestoneComplete } from "./state.js";
|
||||
import { invalidateAllCaches } from "./cache.js";
|
||||
|
|
@ -213,8 +215,14 @@ export async function selectDoctorScope(basePath: string, requestedScope?: strin
|
|||
const roadmapPath = resolveMilestoneFile(basePath, milestone.id, "ROADMAP");
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (!roadmapContent) continue;
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
if (!isMilestoneComplete(roadmap)) return milestone.id;
|
||||
if (isDbAvailable()) {
|
||||
const dbSlices = getMilestoneSlices(milestone.id);
|
||||
const allDone = dbSlices.length > 0 && dbSlices.every(s => s.status === "complete");
|
||||
if (!allDone) return milestone.id;
|
||||
} else {
|
||||
const roadmap = parseLegacyRoadmap(roadmapContent);
|
||||
if (!isMilestoneComplete(roadmap)) return milestone.id;
|
||||
}
|
||||
}
|
||||
|
||||
return state.registry[0]?.id;
|
||||
|
|
@ -460,7 +468,25 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
const roadmapContent = roadmapPath ? await loadFile(roadmapPath) : null;
|
||||
if (!roadmapContent) continue;
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
|
||||
// Normalize slices: prefer DB, fall back to parser
|
||||
type NormSlice = RoadmapSliceEntry;
|
||||
let slices: NormSlice[];
|
||||
if (isDbAvailable()) {
|
||||
const dbSlices = getMilestoneSlices(milestoneId);
|
||||
slices = dbSlices.map(s => ({
|
||||
id: s.id,
|
||||
title: s.title,
|
||||
done: s.status === "complete",
|
||||
risk: (s.risk || "medium") as RoadmapSliceEntry["risk"],
|
||||
depends: s.depends,
|
||||
demo: s.demo,
|
||||
}));
|
||||
} else {
|
||||
slices = parseLegacyRoadmap(roadmapContent).slices;
|
||||
}
|
||||
// Wrap in Roadmap-compatible shape for detectCircularDependencies
|
||||
const roadmap = { slices };
|
||||
|
||||
// ── Circular dependency detection ──────────────────────────────────────
|
||||
for (const cycle of detectCircularDependencies(roadmap.slices)) {
|
||||
|
|
@ -579,7 +605,17 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
|
||||
const planPath = resolveSliceFile(basePath, milestoneId, slice.id, "PLAN");
|
||||
const planContent = planPath ? await loadFile(planPath) : null;
|
||||
const plan = planContent ? parsePlan(planContent) : null;
|
||||
// Normalize plan tasks: prefer DB, fall back to parsers-legacy
|
||||
let plan: { tasks: Array<{ id: string; done: boolean; title: string; estimate?: string }> } | null = null;
|
||||
if (isDbAvailable()) {
|
||||
const dbTasks = getSliceTasks(milestoneId, slice.id);
|
||||
if (dbTasks.length > 0) {
|
||||
plan = { tasks: dbTasks.map(t => ({ id: t.id, done: t.status === "complete" || t.status === "done", title: t.title, estimate: t.estimate || undefined })) };
|
||||
}
|
||||
}
|
||||
if (!plan && planContent) {
|
||||
plan = parseLegacyPlan(planContent);
|
||||
}
|
||||
if (!plan) {
|
||||
if (!slice.done) {
|
||||
issues.push({
|
||||
|
|
@ -710,7 +746,8 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
}
|
||||
|
||||
// Milestone-level check: all slices done but no validation file
|
||||
if (isMilestoneComplete(roadmap) && !resolveMilestoneFile(basePath, milestoneId, "VALIDATION") && !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")) {
|
||||
const milestoneComplete = roadmap.slices.length > 0 && roadmap.slices.every(s => s.done);
|
||||
if (milestoneComplete && !resolveMilestoneFile(basePath, milestoneId, "VALIDATION") && !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")) {
|
||||
issues.push({
|
||||
severity: "info",
|
||||
code: "all_slices_done_missing_milestone_validation",
|
||||
|
|
@ -723,7 +760,7 @@ export async function runGSDDoctor(basePath: string, options?: { fix?: boolean;
|
|||
}
|
||||
|
||||
// Milestone-level check: all slices done but no milestone summary
|
||||
if (isMilestoneComplete(roadmap) && !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")) {
|
||||
if (milestoneComplete && !resolveMilestoneFile(basePath, milestoneId, "SUMMARY")) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
code: "all_slices_done_missing_milestone_summary",
|
||||
|
|
|
|||
|
|
@ -10,8 +10,7 @@ import { resolveMilestoneFile, relMilestoneFile, resolveGsdRootFile } from './pa
|
|||
import { milestoneIdSort, findMilestoneIds } from './milestone-ids.js';
|
||||
|
||||
import type {
|
||||
Roadmap, BoundaryMapEntry,
|
||||
SlicePlan, TaskPlanEntry, TaskPlanFile, TaskPlanFrontmatter,
|
||||
TaskPlanFile, TaskPlanFrontmatter,
|
||||
Summary, SummaryFrontmatter, SummaryRequires, FileModified,
|
||||
Continue, ContinueFrontmatter, ContinueStatus,
|
||||
RequirementCounts,
|
||||
|
|
@ -21,9 +20,7 @@ import type {
|
|||
} from './types.js';
|
||||
|
||||
import { checkExistingEnvKeys } from './env-utils.js';
|
||||
import { parseRoadmapSlices } from './roadmap-slices.js';
|
||||
import { nativeParseRoadmap, nativeExtractSection, nativeParsePlanFile, nativeParseSummaryFile, NATIVE_UNAVAILABLE } from './native-parser-bridge.js';
|
||||
import { debugTime, debugCount } from './debug-logger.js';
|
||||
import { nativeExtractSection, nativeParseSummaryFile, NATIVE_UNAVAILABLE } from './native-parser-bridge.js';
|
||||
import { CACHE_MAX } from './constants.js';
|
||||
import { splitFrontmatter, parseFrontmatterMap } from '../shared/frontmatter.js';
|
||||
|
||||
|
|
@ -55,9 +52,22 @@ function cachedParse<T>(content: string, tag: string, parseFn: (c: string) => T)
|
|||
return result;
|
||||
}
|
||||
|
||||
/** Clear the module-scoped parse cache. Call when files change on disk. */
|
||||
// ─── Cross-module cache clear registry ────────────────────────────────────
|
||||
// parsers-legacy.ts registers its cache-clear callback here at module init
|
||||
// to avoid circular imports. clearParseCache() calls all registered callbacks.
|
||||
const _cacheClearCallbacks: (() => void)[] = [];
|
||||
|
||||
/** Register a callback to be invoked when clearParseCache() is called.
|
||||
* Used by parsers-legacy.ts to synchronously clear its own cache. */
|
||||
export function registerCacheClearCallback(cb: () => void): void {
|
||||
_cacheClearCallbacks.push(cb);
|
||||
}
|
||||
|
||||
/** Clear the module-scoped parse cache. Call when files change on disk.
|
||||
* Also clears any registered external caches (e.g. parsers-legacy.ts). */
|
||||
export function clearParseCache(): void {
|
||||
_parseCache.clear();
|
||||
for (const cb of _cacheClearCallbacks) cb();
|
||||
}
|
||||
|
||||
// ─── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
|
@ -117,95 +127,6 @@ export function extractBoldField(text: string, key: string): string | null {
|
|||
return match ? match[1].trim() : null;
|
||||
}
|
||||
|
||||
// ─── Roadmap Parser ────────────────────────────────────────────────────────
|
||||
|
||||
export function parseRoadmap(content: string): Roadmap {
|
||||
return cachedParse(content, 'roadmap', _parseRoadmapImpl);
|
||||
}
|
||||
|
||||
function _parseRoadmapImpl(content: string): Roadmap {
|
||||
const stopTimer = debugTime("parse-roadmap");
|
||||
// Try native parser first for better performance
|
||||
const nativeResult = nativeParseRoadmap(content);
|
||||
if (nativeResult) {
|
||||
stopTimer({ native: true, slices: nativeResult.slices.length, boundaryEntries: nativeResult.boundaryMap.length });
|
||||
debugCount("parseRoadmapCalls");
|
||||
return nativeResult;
|
||||
}
|
||||
|
||||
const lines = content.split('\n');
|
||||
|
||||
const h1 = lines.find(l => l.startsWith('# '));
|
||||
const title = h1 ? h1.slice(2).trim() : '';
|
||||
const vision = extractBoldField(content, 'Vision') || '';
|
||||
|
||||
const scSection = extractSection(content, 'Success Criteria', 2) ||
|
||||
(() => {
|
||||
const idx = content.indexOf('**Success Criteria:**');
|
||||
if (idx === -1) return '';
|
||||
const rest = content.slice(idx);
|
||||
const nextSection = rest.indexOf('\n---');
|
||||
const block = rest.slice(0, nextSection === -1 ? undefined : nextSection);
|
||||
const firstNewline = block.indexOf('\n');
|
||||
return firstNewline === -1 ? '' : block.slice(firstNewline + 1);
|
||||
})();
|
||||
const successCriteria = scSection ? parseBullets(scSection) : [];
|
||||
|
||||
// Slices
|
||||
const slices = parseRoadmapSlices(content);
|
||||
|
||||
// Boundary map
|
||||
const boundaryMap: BoundaryMapEntry[] = [];
|
||||
const bmSection = extractSection(content, 'Boundary Map');
|
||||
|
||||
if (bmSection) {
|
||||
const h3Sections = extractAllSections(bmSection, 3);
|
||||
for (const [heading, sectionContent] of h3Sections) {
|
||||
const arrowMatch = heading.match(/^(\S+)\s*→\s*(\S+)/);
|
||||
if (!arrowMatch) continue;
|
||||
|
||||
const fromSlice = arrowMatch[1];
|
||||
const toSlice = arrowMatch[2];
|
||||
|
||||
let produces = '';
|
||||
let consumes = '';
|
||||
|
||||
// Use indexOf-based parsing instead of [\s\S]*? regex to avoid
|
||||
// catastrophic backtracking on content with code fences (#468).
|
||||
const prodIdx = sectionContent.search(/^Produces:\s*$/m);
|
||||
if (prodIdx !== -1) {
|
||||
const afterProd = sectionContent.indexOf('\n', prodIdx);
|
||||
if (afterProd !== -1) {
|
||||
const consIdx = sectionContent.search(/^Consumes/m);
|
||||
const endIdx = consIdx !== -1 && consIdx > afterProd ? consIdx : sectionContent.length;
|
||||
produces = sectionContent.slice(afterProd + 1, endIdx).trim();
|
||||
}
|
||||
}
|
||||
|
||||
const consLineMatch = sectionContent.match(/^Consumes[^:]*:\s*(.+)$/m);
|
||||
if (consLineMatch) {
|
||||
consumes = consLineMatch[1].trim();
|
||||
}
|
||||
if (!consumes) {
|
||||
const consIdx = sectionContent.search(/^Consumes[^:]*:\s*$/m);
|
||||
if (consIdx !== -1) {
|
||||
const afterCons = sectionContent.indexOf('\n', consIdx);
|
||||
if (afterCons !== -1) {
|
||||
consumes = sectionContent.slice(afterCons + 1).trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
boundaryMap.push({ fromSlice, toSlice, produces, consumes });
|
||||
}
|
||||
}
|
||||
|
||||
const result = { title, vision, successCriteria, slices, boundaryMap };
|
||||
stopTimer({ native: false, slices: slices.length, boundaryEntries: boundaryMap.length });
|
||||
debugCount("parseRoadmapCalls");
|
||||
return result;
|
||||
}
|
||||
|
||||
// ─── Secrets Manifest Parser ───────────────────────────────────────────────
|
||||
|
||||
const VALID_STATUSES = new Set<SecretsManifestEntryStatus>(['pending', 'collected', 'skipped']);
|
||||
|
|
@ -314,131 +235,6 @@ export function parseTaskPlanFile(content: string): TaskPlanFile {
|
|||
};
|
||||
}
|
||||
|
||||
export function parsePlan(content: string): SlicePlan {
|
||||
return cachedParse(content, 'plan', _parsePlanImpl);
|
||||
}
|
||||
|
||||
function _parsePlanImpl(content: string): SlicePlan {
|
||||
const stopTimer = debugTime("parse-plan");
|
||||
const [, body] = splitFrontmatter(content);
|
||||
// Try native parser first for better performance
|
||||
const nativeResult = nativeParsePlanFile(body);
|
||||
if (nativeResult) {
|
||||
stopTimer({ native: true });
|
||||
return {
|
||||
id: nativeResult.id,
|
||||
title: nativeResult.title,
|
||||
goal: nativeResult.goal,
|
||||
demo: nativeResult.demo,
|
||||
mustHaves: nativeResult.mustHaves,
|
||||
tasks: nativeResult.tasks.map(t => ({
|
||||
id: t.id,
|
||||
title: t.title,
|
||||
description: t.description,
|
||||
done: t.done,
|
||||
estimate: t.estimate,
|
||||
...(t.files.length > 0 ? { files: t.files } : {}),
|
||||
...(t.verify ? { verify: t.verify } : {}),
|
||||
})),
|
||||
filesLikelyTouched: nativeResult.filesLikelyTouched,
|
||||
};
|
||||
}
|
||||
|
||||
const lines = body.split('\n');
|
||||
|
||||
const h1 = lines.find(l => l.startsWith('# '));
|
||||
let id = '';
|
||||
let title = '';
|
||||
if (h1) {
|
||||
const match = h1.match(/^#\s+(\w+):\s+(.+)/);
|
||||
if (match) {
|
||||
id = match[1];
|
||||
title = match[2].trim();
|
||||
} else {
|
||||
title = h1.slice(2).trim();
|
||||
}
|
||||
}
|
||||
|
||||
const goal = extractBoldField(body, 'Goal') || '';
|
||||
const demo = extractBoldField(body, 'Demo') || '';
|
||||
|
||||
const mhSection = extractSection(body, 'Must-Haves');
|
||||
const mustHaves = mhSection ? parseBullets(mhSection) : [];
|
||||
|
||||
const tasksSection = extractSection(body, 'Tasks');
|
||||
const tasks: TaskPlanEntry[] = [];
|
||||
|
||||
if (tasksSection) {
|
||||
const taskLines = tasksSection.split('\n');
|
||||
let currentTask: TaskPlanEntry | null = null;
|
||||
|
||||
for (const line of taskLines) {
|
||||
const cbMatch = line.match(/^-\s+\[([ xX])\]\s+\*\*([\w.]+):\s+(.+?)\*\*\s*(.*)/);
|
||||
// Heading-style: ### T01 -- Title, ### T01: Title, ### T01 — Title
|
||||
const hdMatch = !cbMatch ? line.match(/^#{2,4}\s+([\w.]+)\s*(?:--|—|:)\s*(.+)/) : null;
|
||||
if (cbMatch || hdMatch) {
|
||||
if (currentTask) tasks.push(currentTask);
|
||||
|
||||
if (cbMatch) {
|
||||
const rest = cbMatch[4] || '';
|
||||
const estMatch = rest.match(/`est:([^`]+)`/);
|
||||
const estimate = estMatch ? estMatch[1] : '';
|
||||
|
||||
currentTask = {
|
||||
id: cbMatch[2],
|
||||
title: cbMatch[3],
|
||||
description: '',
|
||||
done: cbMatch[1].toLowerCase() === 'x',
|
||||
estimate,
|
||||
};
|
||||
} else {
|
||||
const rest = hdMatch![2] || '';
|
||||
const titleEstMatch = rest.match(/^(.+?)\s*`est:([^`]+)`\s*$/);
|
||||
const title = titleEstMatch ? titleEstMatch[1].trim() : rest.trim();
|
||||
const estimate = titleEstMatch ? titleEstMatch[2] : '';
|
||||
|
||||
currentTask = {
|
||||
id: hdMatch![1],
|
||||
title,
|
||||
description: '',
|
||||
done: false,
|
||||
estimate,
|
||||
};
|
||||
}
|
||||
} else if (currentTask && line.match(/^\s*-\s+Files:\s*(.*)/)) {
|
||||
const filesMatch = line.match(/^\s*-\s+Files:\s*(.*)/);
|
||||
if (filesMatch) {
|
||||
currentTask.files = filesMatch[1]
|
||||
.split(',')
|
||||
.map(f => f.replace(/`/g, '').trim())
|
||||
.filter(f => f.length > 0);
|
||||
}
|
||||
} else if (currentTask && line.match(/^\s*-\s+Verify:\s*(.*)/)) {
|
||||
const verifyMatch = line.match(/^\s*-\s+Verify:\s*(.*)/);
|
||||
if (verifyMatch) {
|
||||
currentTask.verify = verifyMatch[1].trim();
|
||||
}
|
||||
} else if (currentTask && line.trim() && !line.startsWith('#')) {
|
||||
const desc = line.trim();
|
||||
if (desc) {
|
||||
currentTask.description = currentTask.description
|
||||
? currentTask.description + ' ' + desc
|
||||
: desc;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (currentTask) tasks.push(currentTask);
|
||||
}
|
||||
|
||||
const filesSection = extractSection(body, 'Files Likely Touched');
|
||||
const filesLikelyTouched = filesSection ? parseBullets(filesSection) : [];
|
||||
|
||||
const result = { id, title, goal, demo, mustHaves, tasks, filesLikelyTouched };
|
||||
stopTimer({ tasks: tasks.length });
|
||||
debugCount("parsePlanCalls");
|
||||
return result;
|
||||
}
|
||||
|
||||
// ─── Summary Parser ────────────────────────────────────────────────────────
|
||||
|
||||
export function parseSummary(content: string): Summary {
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -8,7 +8,8 @@
|
|||
|
||||
import type { ExtensionAPI, ExtensionContext, ExtensionCommandContext } from "@gsd/pi-coding-agent";
|
||||
import { showNextAction } from "../shared/tui.js";
|
||||
import { loadFile, parseRoadmap } from "./files.js";
|
||||
import { loadFile } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices } from "./gsd-db.js";
|
||||
import { loadPrompt, inlineTemplate } from "./prompt-loader.js";
|
||||
import { buildSkillActivationBlock } from "./auto-prompts.js";
|
||||
import { deriveState } from "./state.js";
|
||||
|
|
@ -446,9 +447,13 @@ async function buildDiscussSlicePrompt(
|
|||
}
|
||||
|
||||
// Completed slice summaries — what was already built that this slice builds on
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
for (const s of roadmap.slices) {
|
||||
{
|
||||
type NormSlice = { id: string; done: boolean };
|
||||
let normSlices: NormSlice[] = [];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(mid).map(s => ({ id: s.id, done: s.status === "complete" }));
|
||||
}
|
||||
for (const s of normSlices) {
|
||||
if (!s.done || s.id === sid) continue;
|
||||
const summaryPath = resolveSliceFile(base, mid, s.id, "SUMMARY");
|
||||
const summaryRel = relSliceFile(base, mid, s.id, "SUMMARY");
|
||||
|
|
@ -575,16 +580,23 @@ export async function showDiscuss(
|
|||
return;
|
||||
}
|
||||
|
||||
// Guard: no roadmap yet
|
||||
// Guard: no roadmap yet (unless DB has slices)
|
||||
const roadmapFile = resolveMilestoneFile(basePath, mid, "ROADMAP");
|
||||
const roadmapContent = roadmapFile ? await loadFile(roadmapFile) : null;
|
||||
if (!roadmapContent) {
|
||||
if (!roadmapContent && !isDbAvailable()) {
|
||||
ctx.ui.notify("No roadmap yet for this milestone. Run /gsd to plan first.", "warning");
|
||||
return;
|
||||
}
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const pendingSlices = roadmap.slices.filter(s => !s.done);
|
||||
// Normalize slices: prefer DB, fall back to parser
|
||||
type NormSlice = { id: string; done: boolean; title: string };
|
||||
let normSlices: NormSlice[];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(mid).map(s => ({ id: s.id, done: s.status === "complete", title: s.title }));
|
||||
} else {
|
||||
normSlices = [];
|
||||
}
|
||||
const pendingSlices = normSlices.filter(s => !s.done);
|
||||
|
||||
if (pendingSlices.length === 0) {
|
||||
ctx.ui.notify("All slices are complete — nothing to discuss.", "info");
|
||||
|
|
|
|||
|
|
@ -8,10 +8,12 @@
|
|||
// Critical invariant: rendered markdown must round-trip through
|
||||
// parseRoadmap(), parsePlan(), parseSummary() in files.ts.
|
||||
|
||||
import { readFileSync, existsSync } from "node:fs";
|
||||
import { readFileSync, existsSync, mkdirSync } from "node:fs";
|
||||
import { join, relative } from "node:path";
|
||||
import { createRequire } from "node:module";
|
||||
import {
|
||||
getAllMilestones,
|
||||
getMilestone,
|
||||
getMilestoneSlices,
|
||||
getSliceTasks,
|
||||
getTask,
|
||||
|
|
@ -29,7 +31,7 @@ import {
|
|||
buildTaskFileName,
|
||||
buildSliceFileName,
|
||||
} from "./paths.js";
|
||||
import { saveFile, clearParseCache, parseRoadmap, parsePlan } from "./files.js";
|
||||
import { saveFile, clearParseCache } from "./files.js";
|
||||
import { invalidateStateCache } from "./state.js";
|
||||
import { clearPathCache } from "./paths.js";
|
||||
|
||||
|
|
@ -149,6 +151,277 @@ async function writeAndStore(
|
|||
invalidateCaches();
|
||||
}
|
||||
|
||||
function renderRoadmapMarkdown(milestone: MilestoneRow, slices: SliceRow[]): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`# ${milestone.id}: ${milestone.title || milestone.id}`);
|
||||
lines.push("");
|
||||
lines.push(`**Vision:** ${milestone.vision}`);
|
||||
lines.push("");
|
||||
|
||||
if (milestone.success_criteria.length > 0) {
|
||||
lines.push("## Success Criteria");
|
||||
lines.push("");
|
||||
for (const criterion of milestone.success_criteria) {
|
||||
lines.push(`- ${criterion}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
lines.push("## Slices");
|
||||
lines.push("");
|
||||
for (const slice of slices) {
|
||||
const done = slice.status === "complete" ? "x" : " ";
|
||||
const depends = `[${(slice.depends ?? []).join(",")}]`;
|
||||
lines.push(`- [${done}] **${slice.id}: ${slice.title}** \`risk:${slice.risk}\` \`depends:${depends}\``);
|
||||
lines.push(` > After this: ${slice.demo}`);
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (milestone.boundary_map_markdown.trim()) {
|
||||
lines.push("## Boundary Map");
|
||||
lines.push("");
|
||||
lines.push(milestone.boundary_map_markdown.trim());
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
return `${lines.join("\n").trimEnd()}\n`;
|
||||
}
|
||||
|
||||
function renderTaskPlanMarkdown(task: TaskRow): string {
|
||||
const estimatedSteps = Math.max(1, task.description.trim().split(/\n+/).filter(Boolean).length || 1);
|
||||
const estimatedFiles = task.files.length > 0
|
||||
? task.files.length
|
||||
: task.expected_output.length > 0
|
||||
? task.expected_output.length
|
||||
: task.inputs.length > 0
|
||||
? task.inputs.length
|
||||
: 1;
|
||||
|
||||
const lines: string[] = [];
|
||||
lines.push("---");
|
||||
lines.push(`estimated_steps: ${estimatedSteps}`);
|
||||
lines.push(`estimated_files: ${estimatedFiles}`);
|
||||
lines.push("skills_used: []");
|
||||
lines.push("---");
|
||||
lines.push("");
|
||||
lines.push(`# ${task.id}: ${task.title || task.id}`);
|
||||
lines.push("");
|
||||
|
||||
if (task.description.trim()) {
|
||||
lines.push(task.description.trim());
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
lines.push("## Inputs");
|
||||
lines.push("");
|
||||
if (task.inputs.length > 0) {
|
||||
for (const input of task.inputs) {
|
||||
lines.push(`- \`${input}\``);
|
||||
}
|
||||
} else {
|
||||
lines.push("- None specified.");
|
||||
}
|
||||
lines.push("");
|
||||
|
||||
lines.push("## Expected Output");
|
||||
lines.push("");
|
||||
if (task.expected_output.length > 0) {
|
||||
for (const output of task.expected_output) {
|
||||
lines.push(`- \`${output}\``);
|
||||
}
|
||||
} else if (task.files.length > 0) {
|
||||
for (const file of task.files) {
|
||||
lines.push(`- \`${file}\``);
|
||||
}
|
||||
} else {
|
||||
lines.push("- Update the implementation and proof artifacts needed for this task.");
|
||||
}
|
||||
lines.push("");
|
||||
|
||||
lines.push("## Verification");
|
||||
lines.push("");
|
||||
lines.push(task.verify.trim() || "- Verify the task outcome with the slice-level checks.");
|
||||
lines.push("");
|
||||
|
||||
if (task.observability_impact.trim()) {
|
||||
lines.push("## Observability Impact");
|
||||
lines.push("");
|
||||
lines.push(task.observability_impact.trim());
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
return `${lines.join("\n").trimEnd()}\n`;
|
||||
}
|
||||
|
||||
function renderSlicePlanMarkdown(slice: SliceRow, tasks: TaskRow[]): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`# ${slice.id}: ${slice.title || slice.id}`);
|
||||
lines.push("");
|
||||
lines.push(`**Goal:** ${slice.goal}`);
|
||||
lines.push(`**Demo:** ${slice.demo}`);
|
||||
lines.push("");
|
||||
|
||||
lines.push("## Must-Haves");
|
||||
lines.push("");
|
||||
if (slice.success_criteria.trim()) {
|
||||
for (const line of slice.success_criteria.split(/\n+/).map((entry) => entry.trim()).filter(Boolean)) {
|
||||
lines.push(line.startsWith("-") ? line : `- ${line}`);
|
||||
}
|
||||
} else {
|
||||
lines.push("- Complete the planned slice outcomes.");
|
||||
}
|
||||
lines.push("");
|
||||
|
||||
if (slice.proof_level.trim()) {
|
||||
lines.push("## Proof Level");
|
||||
lines.push("");
|
||||
lines.push(`- This slice proves: ${slice.proof_level.trim()}`);
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (slice.integration_closure.trim()) {
|
||||
lines.push("## Integration Closure");
|
||||
lines.push("");
|
||||
lines.push(slice.integration_closure.trim());
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
lines.push("## Verification");
|
||||
lines.push("");
|
||||
if (slice.observability_impact.trim()) {
|
||||
const verificationLines = slice.observability_impact
|
||||
.split(/\n+/)
|
||||
.map((entry) => entry.trim())
|
||||
.filter(Boolean);
|
||||
for (const line of verificationLines) {
|
||||
lines.push(line.startsWith("-") ? line : `- ${line}`);
|
||||
}
|
||||
} else {
|
||||
lines.push("- Run the task and slice verification checks for this slice.");
|
||||
}
|
||||
lines.push("");
|
||||
|
||||
lines.push("## Tasks");
|
||||
lines.push("");
|
||||
for (const task of tasks) {
|
||||
const done = task.status === "done" || task.status === "complete" ? "x" : " ";
|
||||
const estimate = task.estimate.trim() ? ` \`est:${task.estimate.trim()}\`` : "";
|
||||
lines.push(`- [${done}] **${task.id}: ${task.title || task.id}**${estimate}`);
|
||||
if (task.description.trim()) {
|
||||
lines.push(` ${task.description.trim()}`);
|
||||
}
|
||||
if (task.files.length > 0) {
|
||||
lines.push(` - Files: ${task.files.map((file) => `\`${file}\``).join(", ")}`);
|
||||
}
|
||||
if (task.verify.trim()) {
|
||||
lines.push(` - Verify: ${task.verify.trim()}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
const filesLikelyTouched = Array.from(new Set(tasks.flatMap((task) => task.files)));
|
||||
if (filesLikelyTouched.length > 0) {
|
||||
lines.push("## Files Likely Touched");
|
||||
lines.push("");
|
||||
for (const file of filesLikelyTouched) {
|
||||
lines.push(`- ${file}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
return `${lines.join("\n").trimEnd()}\n`;
|
||||
}
|
||||
|
||||
export async function renderPlanFromDb(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
): Promise<{ planPath: string; taskPlanPaths: string[]; content: string }> {
|
||||
const slice = getSlice(milestoneId, sliceId);
|
||||
if (!slice) {
|
||||
throw new Error(`slice ${milestoneId}/${sliceId} not found`);
|
||||
}
|
||||
|
||||
const tasks = getSliceTasks(milestoneId, sliceId);
|
||||
if (tasks.length === 0) {
|
||||
throw new Error(`no tasks found for ${milestoneId}/${sliceId}`);
|
||||
}
|
||||
|
||||
const slicePath = resolveSlicePath(basePath, milestoneId, sliceId)
|
||||
?? join(gsdRoot(basePath), "milestones", milestoneId, "slices", sliceId);
|
||||
const absPath = resolveSliceFile(basePath, milestoneId, sliceId, "PLAN")
|
||||
?? join(slicePath, `${sliceId}-PLAN.md`);
|
||||
const artifactPath = toArtifactPath(absPath, basePath);
|
||||
const content = renderSlicePlanMarkdown(slice, tasks);
|
||||
|
||||
await writeAndStore(absPath, artifactPath, content, {
|
||||
artifact_type: "PLAN",
|
||||
milestone_id: milestoneId,
|
||||
slice_id: sliceId,
|
||||
});
|
||||
|
||||
const taskPlanPaths: string[] = [];
|
||||
for (const task of tasks) {
|
||||
const rendered = await renderTaskPlanFromDb(basePath, milestoneId, sliceId, task.id);
|
||||
taskPlanPaths.push(rendered.taskPlanPath);
|
||||
}
|
||||
|
||||
return { planPath: absPath, taskPlanPaths, content };
|
||||
}
|
||||
|
||||
export async function renderTaskPlanFromDb(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
taskId: string,
|
||||
): Promise<{ taskPlanPath: string; content: string }> {
|
||||
const task = getTask(milestoneId, sliceId, taskId);
|
||||
if (!task) {
|
||||
throw new Error(`task ${milestoneId}/${sliceId}/${taskId} not found`);
|
||||
}
|
||||
|
||||
const tasksDir = resolveTasksDir(basePath, milestoneId, sliceId)
|
||||
?? join(gsdRoot(basePath), "milestones", milestoneId, "slices", sliceId, "tasks");
|
||||
mkdirSync(tasksDir, { recursive: true });
|
||||
const absPath = join(tasksDir, buildTaskFileName(taskId, "PLAN"));
|
||||
const artifactPath = toArtifactPath(absPath, basePath);
|
||||
const content = renderTaskPlanMarkdown(task);
|
||||
|
||||
await writeAndStore(absPath, artifactPath, content, {
|
||||
artifact_type: "PLAN",
|
||||
milestone_id: milestoneId,
|
||||
slice_id: sliceId,
|
||||
task_id: taskId,
|
||||
});
|
||||
|
||||
return { taskPlanPath: absPath, content };
|
||||
}
|
||||
|
||||
export async function renderRoadmapFromDb(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
): Promise<{ roadmapPath: string; content: string }> {
|
||||
const milestone = getMilestone(milestoneId);
|
||||
if (!milestone) {
|
||||
throw new Error(`milestone ${milestoneId} not found`);
|
||||
}
|
||||
|
||||
const slices = getMilestoneSlices(milestoneId);
|
||||
const absPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP") ??
|
||||
join(gsdRoot(basePath), "milestones", milestoneId, `${milestoneId}-ROADMAP.md`);
|
||||
const artifactPath = toArtifactPath(absPath, basePath);
|
||||
const content = renderRoadmapMarkdown(milestone, slices);
|
||||
|
||||
await writeAndStore(absPath, artifactPath, content, {
|
||||
artifact_type: "ROADMAP",
|
||||
milestone_id: milestoneId,
|
||||
});
|
||||
|
||||
return { roadmapPath: absPath, content };
|
||||
}
|
||||
|
||||
// ─── Roadmap Checkbox Rendering ───────────────────────────────────────────
|
||||
|
||||
/**
|
||||
|
|
@ -493,6 +766,17 @@ export interface StaleEntry {
|
|||
* Logs to stderr when stale files are detected.
|
||||
*/
|
||||
export function detectStaleRenders(basePath: string): StaleEntry[] {
|
||||
// Lazy-load parsers — intentional disk-vs-DB comparison requires parsers
|
||||
const _require = createRequire(import.meta.url);
|
||||
let parseRoadmap: Function, parsePlan: Function;
|
||||
try {
|
||||
const m = _require("./parsers-legacy.ts");
|
||||
parseRoadmap = m.parseRoadmap; parsePlan = m.parsePlan;
|
||||
} catch {
|
||||
const m = _require("./parsers-legacy.js");
|
||||
parseRoadmap = m.parseRoadmap; parsePlan = m.parsePlan;
|
||||
}
|
||||
|
||||
const stale: StaleEntry[] = [];
|
||||
const milestones = getAllMilestones();
|
||||
|
||||
|
|
@ -508,7 +792,7 @@ export function detectStaleRenders(basePath: string): StaleEntry[] {
|
|||
|
||||
for (const slice of slices) {
|
||||
const isCompleteInDb = slice.status === "complete";
|
||||
const roadmapSlice = parsed.slices.find(s => s.id === slice.id);
|
||||
const roadmapSlice = parsed.slices.find((s: { id: string }) => s.id === slice.id);
|
||||
if (!roadmapSlice) continue;
|
||||
|
||||
if (isCompleteInDb && !roadmapSlice.done) {
|
||||
|
|
@ -541,7 +825,7 @@ export function detectStaleRenders(basePath: string): StaleEntry[] {
|
|||
|
||||
for (const task of tasks) {
|
||||
const isDoneInDb = task.status === "done" || task.status === "complete";
|
||||
const planTask = parsed.tasks.find(t => t.id === task.id);
|
||||
const planTask = parsed.tasks.find((t: { id: string }) => t.id === task.id);
|
||||
if (!planTask) continue;
|
||||
|
||||
if (isDoneInDb && !planTask.done) {
|
||||
|
|
@ -719,3 +1003,94 @@ export async function repairStaleRenders(basePath: string): Promise<number> {
|
|||
|
||||
return repairCount;
|
||||
}
|
||||
|
||||
// ─── Replan & Assessment Renderers ────────────────────────────────────────
|
||||
|
||||
/**
 * Input for renderReplanFromDb — records why a slice is being replanned.
 */
export interface ReplanData {
  // Id of the task that blocked the slice.
  blockerTaskId: string;
  // Free-form description of the blocker (rendered under "## Blocker Description").
  blockerDescription: string;
  // What changed as a result (rendered under "## What Changed").
  whatChanged: string;
}
|
||||
|
||||
/**
 * Input for renderAssessmentFromDb — a roadmap reassessment record.
 */
export interface AssessmentData {
  // Verdict string rendered on the "**Verdict:**" line (format not constrained here).
  verdict: string;
  // Free-form assessment body (rendered under "## Assessment").
  assessment: string;
  // When set, the slice whose completion triggered this assessment.
  completedSliceId?: string;
}
|
||||
|
||||
export async function renderReplanFromDb(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
replanData: ReplanData,
|
||||
): Promise<{ replanPath: string; content: string }> {
|
||||
const slicePath = resolveSlicePath(basePath, milestoneId, sliceId)
|
||||
?? join(gsdRoot(basePath), "milestones", milestoneId, "slices", sliceId);
|
||||
const absPath = join(slicePath, `${sliceId}-REPLAN.md`);
|
||||
const artifactPath = toArtifactPath(absPath, basePath);
|
||||
|
||||
const lines: string[] = [];
|
||||
lines.push(`# ${sliceId} Replan`);
|
||||
lines.push("");
|
||||
lines.push(`**Milestone:** ${milestoneId}`);
|
||||
lines.push(`**Slice:** ${sliceId}`);
|
||||
lines.push(`**Blocker Task:** ${replanData.blockerTaskId}`);
|
||||
lines.push(`**Created:** ${new Date().toISOString()}`);
|
||||
lines.push("");
|
||||
lines.push("## Blocker Description");
|
||||
lines.push("");
|
||||
lines.push(replanData.blockerDescription);
|
||||
lines.push("");
|
||||
lines.push("## What Changed");
|
||||
lines.push("");
|
||||
lines.push(replanData.whatChanged);
|
||||
lines.push("");
|
||||
|
||||
const content = `${lines.join("\n").trimEnd()}\n`;
|
||||
|
||||
await writeAndStore(absPath, artifactPath, content, {
|
||||
artifact_type: "REPLAN",
|
||||
milestone_id: milestoneId,
|
||||
slice_id: sliceId,
|
||||
});
|
||||
|
||||
return { replanPath: absPath, content };
|
||||
}
|
||||
|
||||
export async function renderAssessmentFromDb(
|
||||
basePath: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
assessmentData: AssessmentData,
|
||||
): Promise<{ assessmentPath: string; content: string }> {
|
||||
const slicePath = resolveSlicePath(basePath, milestoneId, sliceId)
|
||||
?? join(gsdRoot(basePath), "milestones", milestoneId, "slices", sliceId);
|
||||
const absPath = join(slicePath, `${sliceId}-ASSESSMENT.md`);
|
||||
const artifactPath = toArtifactPath(absPath, basePath);
|
||||
|
||||
const lines: string[] = [];
|
||||
lines.push(`# ${sliceId} Assessment`);
|
||||
lines.push("");
|
||||
lines.push(`**Milestone:** ${milestoneId}`);
|
||||
lines.push(`**Slice:** ${sliceId}`);
|
||||
if (assessmentData.completedSliceId) {
|
||||
lines.push(`**Completed Slice:** ${assessmentData.completedSliceId}`);
|
||||
}
|
||||
lines.push(`**Verdict:** ${assessmentData.verdict}`);
|
||||
lines.push(`**Created:** ${new Date().toISOString()}`);
|
||||
lines.push("");
|
||||
lines.push("## Assessment");
|
||||
lines.push("");
|
||||
lines.push(assessmentData.assessment);
|
||||
lines.push("");
|
||||
|
||||
const content = `${lines.join("\n").trimEnd()}\n`;
|
||||
|
||||
await writeAndStore(absPath, artifactPath, content, {
|
||||
artifact_type: "ASSESSMENT",
|
||||
milestone_id: milestoneId,
|
||||
slice_id: sliceId,
|
||||
});
|
||||
|
||||
return { assessmentPath: absPath, content };
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,7 +29,8 @@ import {
|
|||
resolveTaskFiles,
|
||||
} from './paths.js';
|
||||
import { findMilestoneIds } from './guided-flow.js';
|
||||
import { parseRoadmap, parsePlan, parseContextDependsOn } from './files.js';
|
||||
import { parseRoadmap, parsePlan } from './parsers-legacy.js';
|
||||
import { parseContextDependsOn } from './files.js';
|
||||
|
||||
// ─── DECISIONS.md Parser ───────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -536,9 +537,10 @@ export function migrateHierarchyToDb(basePath: string): {
|
|||
// Determine milestone title from roadmap H1 or CONTEXT heading
|
||||
let milestoneTitle = '';
|
||||
let roadmapContent: string | null = null;
|
||||
let roadmap: ReturnType<typeof parseRoadmap> | null = null;
|
||||
if (hasRoadmap) {
|
||||
roadmapContent = readFileSync(roadmapPath!, 'utf-8');
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
roadmap = parseRoadmap(roadmapContent);
|
||||
milestoneTitle = roadmap.title;
|
||||
}
|
||||
if (!milestoneTitle && hasContext) {
|
||||
|
|
@ -554,23 +556,47 @@ export function migrateHierarchyToDb(basePath: string): {
|
|||
dependsOn = parseContextDependsOn(contextContent);
|
||||
}
|
||||
|
||||
// Extract raw "## Boundary Map" section from roadmap markdown for planning column
|
||||
let boundaryMapSection = '';
|
||||
if (roadmapContent) {
|
||||
const bmIdx = roadmapContent.indexOf('## Boundary Map');
|
||||
if (bmIdx >= 0) {
|
||||
const afterBm = roadmapContent.slice(bmIdx);
|
||||
// Take content until next ## heading or EOF
|
||||
const nextHeading = afterBm.indexOf('\n## ', 1);
|
||||
boundaryMapSection = nextHeading >= 0 ? afterBm.slice(0, nextHeading).trim() : afterBm.trim();
|
||||
}
|
||||
}
|
||||
|
||||
// Insert milestone (FK parent — must come first)
|
||||
insertMilestone({
|
||||
id: milestoneId,
|
||||
title: milestoneTitle,
|
||||
status: milestoneStatus,
|
||||
depends_on: dependsOn,
|
||||
planning: {
|
||||
vision: roadmap?.vision ?? '',
|
||||
successCriteria: roadmap?.successCriteria ?? [],
|
||||
boundaryMapMarkdown: boundaryMapSection,
|
||||
},
|
||||
});
|
||||
counts.milestones++;
|
||||
|
||||
// Parse roadmap for slices
|
||||
if (!roadmapContent) continue;
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
if (!roadmap) continue;
|
||||
|
||||
for (const sliceEntry of roadmap.slices) {
|
||||
// Per K002: use 'complete' not 'done'
|
||||
const sliceStatus = sliceEntry.done ? 'complete' : 'pending';
|
||||
|
||||
// Parse slice plan early so goal is available for insertSlice planning column
|
||||
const planPath = resolveSliceFile(basePath, milestoneId, sliceEntry.id, 'PLAN');
|
||||
let plan: ReturnType<typeof parsePlan> | null = null;
|
||||
if (planPath && existsSync(planPath)) {
|
||||
const planContent = readFileSync(planPath, 'utf-8');
|
||||
plan = parsePlan(planContent);
|
||||
}
|
||||
|
||||
insertSlice({
|
||||
id: sliceEntry.id,
|
||||
milestoneId: milestoneId,
|
||||
|
|
@ -579,15 +605,14 @@ export function migrateHierarchyToDb(basePath: string): {
|
|||
risk: sliceEntry.risk,
|
||||
depends: sliceEntry.depends,
|
||||
demo: sliceEntry.demo,
|
||||
planning: {
|
||||
goal: plan?.goal ?? '',
|
||||
},
|
||||
});
|
||||
counts.slices++;
|
||||
|
||||
// Parse slice plan for tasks
|
||||
const planPath = resolveSliceFile(basePath, milestoneId, sliceEntry.id, 'PLAN');
|
||||
if (!planPath || !existsSync(planPath)) continue;
|
||||
|
||||
const planContent = readFileSync(planPath, 'utf-8');
|
||||
const plan = parsePlan(planContent);
|
||||
// Insert tasks from parsed plan
|
||||
if (!plan) continue;
|
||||
|
||||
for (const taskEntry of plan.tasks) {
|
||||
// Per K002: use 'complete' not 'done'
|
||||
|
|
@ -615,6 +640,10 @@ export function migrateHierarchyToDb(basePath: string): {
|
|||
milestoneId: milestoneId,
|
||||
title: taskEntry.title,
|
||||
status: taskStatus,
|
||||
planning: {
|
||||
files: taskEntry.files ?? [],
|
||||
verify: taskEntry.verify ?? '',
|
||||
},
|
||||
});
|
||||
counts.tasks++;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,456 +0,0 @@
|
|||
import { loadFile } from "./files.js";
|
||||
import { resolveSliceFile, resolveTaskFile, resolveTasksDir, resolveTaskFiles } from "./paths.js";
|
||||
|
||||
/**
 * One finding produced by the plan/summary content validators in this file.
 */
export interface ValidationIssue {
  severity: "info" | "warning" | "error";
  // Which artifact kind the issue applies to.
  scope: "slice-plan" | "task-plan" | "task-summary" | "slice-summary";
  // File path the issue refers to, as passed in by the caller.
  file: string;
  // Stable machine-readable rule identifier (e.g. "empty_task_entry").
  ruleId: string;
  // Human-readable description of the problem.
  message: string;
  // Optional remediation hint.
  suggestion?: string;
}
|
||||
|
||||
function getSection(content: string, heading: string, level: number = 2): string | null {
|
||||
const prefix = "#".repeat(level) + " ";
|
||||
const escaped = heading.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
const regex = new RegExp(`^${prefix}${escaped}\\s*$`, "m");
|
||||
const match = regex.exec(content);
|
||||
if (!match) return null;
|
||||
|
||||
const start = match.index + match[0].length;
|
||||
const rest = content.slice(start);
|
||||
const nextHeading = rest.match(new RegExp(`^#{1,${level}} `, "m"));
|
||||
const end = nextHeading ? nextHeading.index! : rest.length;
|
||||
return rest.slice(0, end).trim();
|
||||
}
|
||||
|
||||
function getFrontmatter(content: string): string | null {
|
||||
const trimmed = content.trimStart();
|
||||
if (!trimmed.startsWith("---")) return null;
|
||||
const afterFirst = trimmed.indexOf("\n");
|
||||
if (afterFirst === -1) return null;
|
||||
const rest = trimmed.slice(afterFirst + 1);
|
||||
const endIdx = rest.indexOf("\n---");
|
||||
if (endIdx === -1) return null;
|
||||
return rest.slice(0, endIdx);
|
||||
}
|
||||
|
||||
function hasFrontmatterKey(content: string, key: string): boolean {
|
||||
const fm = getFrontmatter(content);
|
||||
if (!fm) return false;
|
||||
return new RegExp(`^${key}:`, "m").test(fm);
|
||||
}
|
||||
|
||||
function normalizeMeaningfulLines(text: string): string[] {
|
||||
return text
|
||||
.split("\n")
|
||||
.map(line => line.trim())
|
||||
.filter(line => line.length > 0)
|
||||
.filter(line => !line.startsWith("<!--"))
|
||||
.filter(line => !line.endsWith("-->"))
|
||||
.filter(line => !/^[-*]\s*\{\{.+\}\}$/.test(line))
|
||||
.filter(line => !/^\{\{.+\}\}$/.test(line));
|
||||
}
|
||||
|
||||
function sectionLooksPlaceholderOnly(text: string | null): boolean {
|
||||
if (!text) return true;
|
||||
const lines = normalizeMeaningfulLines(text)
|
||||
.map(line => line.replace(/^[-*]\s+/, "").trim())
|
||||
.filter(line => line.length > 0);
|
||||
|
||||
if (lines.length === 0) return true;
|
||||
|
||||
return lines.every(line => {
|
||||
const lower = line.toLowerCase();
|
||||
return lower === "none" ||
|
||||
lower.endsWith(": none") ||
|
||||
lower.includes("{{") ||
|
||||
lower.includes("}}") ||
|
||||
lower.startsWith("required for non-trivial") ||
|
||||
lower.startsWith("describe how a future agent") ||
|
||||
lower.startsWith("prefer:") ||
|
||||
lower.startsWith("keep this section concise");
|
||||
});
|
||||
}
|
||||
|
||||
function textSuggestsObservabilityRelevant(content: string): boolean {
|
||||
const lower = content.toLowerCase();
|
||||
const needles = [
|
||||
" api", "route", "server", "worker", "queue", "job", "sync", "import",
|
||||
"webhook", "auth", "db", "database", "migration", "cache", "background",
|
||||
"polling", "realtime", "socket", "stateful", "integration", "ui", "form",
|
||||
"submit", "status", "service", "pipeline", "health endpoint", "error path"
|
||||
];
|
||||
return needles.some(needle => lower.includes(needle));
|
||||
}
|
||||
|
||||
function verificationMentionsDiagnostics(section: string | null): boolean {
|
||||
if (!section) return false;
|
||||
const lower = section.toLowerCase();
|
||||
const needles = [
|
||||
"error", "failure", "diagnostic", "status", "health", "inspect", "log",
|
||||
"network", "console", "retry", "last error", "correlation", "readiness"
|
||||
];
|
||||
return needles.some(needle => lower.includes(needle));
|
||||
}
|
||||
|
||||
export function validateSlicePlanContent(file: string, content: string): ValidationIssue[] {
|
||||
const issues: ValidationIssue[] = [];
|
||||
|
||||
// ── Plan quality rules (always run, not gated by runtime relevance) ──
|
||||
|
||||
const tasksSection = getSection(content, "Tasks", 2);
|
||||
if (tasksSection) {
|
||||
const lines = tasksSection.split("\n");
|
||||
const taskLinePattern = /^- \[[ x]\] \*\*T\d+:/;
|
||||
const taskLineIndices: number[] = [];
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
if (taskLinePattern.test(lines[i])) taskLineIndices.push(i);
|
||||
}
|
||||
|
||||
for (let t = 0; t < taskLineIndices.length; t++) {
|
||||
const start = taskLineIndices[t];
|
||||
const end = t + 1 < taskLineIndices.length ? taskLineIndices[t + 1] : lines.length;
|
||||
// Check lines between this task header and the next (or section end)
|
||||
const bodyLines = lines.slice(start + 1, end);
|
||||
const meaningful = bodyLines.filter(l => l.trim().length > 0);
|
||||
if (meaningful.length === 0) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-plan",
|
||||
file,
|
||||
ruleId: "empty_task_entry",
|
||||
message: "Inline task entry has no description content beneath the checkbox line.",
|
||||
suggestion: "Add at least a Why/Files/Do/Verify summary so the task is self-describing.",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Observability rules (gated by runtime relevance) ──
|
||||
|
||||
const relevant = textSuggestsObservabilityRelevant(content);
|
||||
if (!relevant) return issues;
|
||||
|
||||
const obs = getSection(content, "Observability / Diagnostics", 2);
|
||||
const verification = getSection(content, "Verification", 2);
|
||||
|
||||
if (!obs) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-plan",
|
||||
file,
|
||||
ruleId: "missing_observability_section",
|
||||
message: "Slice plan appears non-trivial but is missing `## Observability / Diagnostics`.",
|
||||
suggestion: "Add runtime signals, inspection surfaces, failure visibility, and redaction constraints.",
|
||||
});
|
||||
} else if (sectionLooksPlaceholderOnly(obs)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-plan",
|
||||
file,
|
||||
ruleId: "observability_section_placeholder_only",
|
||||
message: "Slice plan has `## Observability / Diagnostics` but it still looks like placeholder text.",
|
||||
suggestion: "Replace placeholders with concrete signals and inspection surfaces a future agent should trust.",
|
||||
});
|
||||
}
|
||||
|
||||
if (!verificationMentionsDiagnostics(verification)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-plan",
|
||||
file,
|
||||
ruleId: "verification_missing_diagnostic_check",
|
||||
message: "Slice verification does not appear to include any diagnostic or failure-path check.",
|
||||
suggestion: "Add at least one verification step for inspectable failure state, structured error output, status surface, or equivalent.",
|
||||
});
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export function validateTaskPlanContent(file: string, content: string): ValidationIssue[] {
|
||||
const issues: ValidationIssue[] = [];
|
||||
|
||||
// ── Plan quality rules (always run, not gated by runtime relevance) ──
|
||||
|
||||
// Rule: empty or missing Steps section
|
||||
const stepsSection = getSection(content, "Steps", 2);
|
||||
if (stepsSection === null || sectionLooksPlaceholderOnly(stepsSection)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "empty_steps_section",
|
||||
message: "Task plan has an empty or missing `## Steps` section.",
|
||||
suggestion: "Add concrete numbered implementation steps so execution has a clear sequence.",
|
||||
});
|
||||
}
|
||||
|
||||
// Rule: placeholder-only Verification section
|
||||
const verificationSection = getSection(content, "Verification", 2);
|
||||
if (verificationSection !== null && sectionLooksPlaceholderOnly(verificationSection)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "placeholder_verification",
|
||||
message: "Task plan has `## Verification` but it still looks like placeholder text.",
|
||||
suggestion: "Replace placeholders with concrete verification commands, test runs, or observable checks.",
|
||||
});
|
||||
}
|
||||
|
||||
// Rule: scope estimate thresholds
|
||||
const fm = getFrontmatter(content);
|
||||
if (fm) {
|
||||
const stepsMatch = fm.match(/^estimated_steps:\s*(\d+)/m);
|
||||
const filesMatch = fm.match(/^estimated_files:\s*(\d+)/m);
|
||||
|
||||
if (stepsMatch) {
|
||||
const estimatedSteps = parseInt(stepsMatch[1], 10);
|
||||
if (estimatedSteps >= 10) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "scope_estimate_steps_high",
|
||||
message: `Task plan estimates ${estimatedSteps} steps (threshold: 10). Consider splitting into smaller tasks.`,
|
||||
suggestion: "Break the task into sub-tasks or reduce scope so each task stays focused and completable in one pass.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (filesMatch) {
|
||||
const estimatedFiles = parseInt(filesMatch[1], 10);
|
||||
if (estimatedFiles >= 12) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "scope_estimate_files_high",
|
||||
message: `Task plan estimates ${estimatedFiles} files (threshold: 12). Consider splitting into smaller tasks.`,
|
||||
suggestion: "Break the task into sub-tasks or reduce scope to keep the change footprint manageable.",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Rule: Inputs and Expected Output should contain backtick-wrapped file paths
|
||||
const inputsSection = getSection(content, "Inputs", 2);
|
||||
const outputSection = getSection(content, "Expected Output", 2);
|
||||
const backtickPathPattern = /`[^`]*[./][^`]*`/;
|
||||
|
||||
if (outputSection === null || !backtickPathPattern.test(outputSection)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "missing_output_file_paths",
|
||||
message: "Task plan `## Expected Output` is missing or has no backtick-wrapped file paths.",
|
||||
suggestion: "List concrete output file paths in backticks (e.g. `src/types.ts`). These are machine-parsed to derive task dependencies.",
|
||||
});
|
||||
}
|
||||
|
||||
if (inputsSection !== null && inputsSection.trim().length > 0 && !backtickPathPattern.test(inputsSection)) {
|
||||
issues.push({
|
||||
severity: "info",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "missing_input_file_paths",
|
||||
message: "Task plan `## Inputs` has content but no backtick-wrapped file paths.",
|
||||
suggestion: "List input file paths in backticks (e.g. `src/config.json`). These are machine-parsed to derive task dependencies.",
|
||||
});
|
||||
}
|
||||
|
||||
// ── Observability rules (gated by runtime relevance) ──
|
||||
|
||||
const relevant = textSuggestsObservabilityRelevant(content);
|
||||
if (!relevant) return issues;
|
||||
|
||||
const obs = getSection(content, "Observability Impact", 2);
|
||||
if (!obs) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "missing_observability_impact",
|
||||
message: "Task plan appears runtime-relevant but is missing `## Observability Impact`.",
|
||||
suggestion: "Explain what signals change, how a future agent inspects this task, and what failure state becomes visible.",
|
||||
});
|
||||
} else if (sectionLooksPlaceholderOnly(obs)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-plan",
|
||||
file,
|
||||
ruleId: "observability_impact_placeholder_only",
|
||||
message: "Task plan has `## Observability Impact` but it still looks empty or placeholder-only.",
|
||||
suggestion: "Fill in concrete inspection surfaces or explicitly justify why observability is not applicable.",
|
||||
});
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export function validateTaskSummaryContent(file: string, content: string): ValidationIssue[] {
|
||||
const issues: ValidationIssue[] = [];
|
||||
if (!hasFrontmatterKey(content, "observability_surfaces")) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-summary",
|
||||
file,
|
||||
ruleId: "missing_observability_frontmatter",
|
||||
message: "Task summary is missing `observability_surfaces` in frontmatter.",
|
||||
suggestion: "List the durable status/log/error surfaces a future agent should use.",
|
||||
});
|
||||
}
|
||||
|
||||
const diagnostics = getSection(content, "Diagnostics", 2);
|
||||
if (!diagnostics) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-summary",
|
||||
file,
|
||||
ruleId: "missing_diagnostics_section",
|
||||
message: "Task summary is missing `## Diagnostics`.",
|
||||
suggestion: "Document how to inspect what this task built later.",
|
||||
});
|
||||
} else if (sectionLooksPlaceholderOnly(diagnostics)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-summary",
|
||||
file,
|
||||
ruleId: "diagnostics_placeholder_only",
|
||||
message: "Task summary diagnostics section still looks like placeholder text.",
|
||||
suggestion: "Replace placeholders with concrete commands, endpoints, logs, error shapes, or failure artifacts.",
|
||||
});
|
||||
}
|
||||
|
||||
const evidence = getSection(content, "Verification Evidence", 2);
|
||||
if (!evidence) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-summary",
|
||||
file,
|
||||
ruleId: "evidence_block_missing",
|
||||
message: "Task summary is missing `## Verification Evidence`.",
|
||||
suggestion: "Add a verification evidence table showing gate check results (command, exit code, verdict, duration).",
|
||||
});
|
||||
} else if (sectionLooksPlaceholderOnly(evidence)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "task-summary",
|
||||
file,
|
||||
ruleId: "evidence_block_placeholder",
|
||||
message: "Task summary verification evidence section still looks like placeholder text.",
|
||||
suggestion: "Replace placeholders with actual gate results or note that no verification commands were discovered.",
|
||||
});
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export function validateSliceSummaryContent(file: string, content: string): ValidationIssue[] {
|
||||
const issues: ValidationIssue[] = [];
|
||||
if (!hasFrontmatterKey(content, "observability_surfaces")) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-summary",
|
||||
file,
|
||||
ruleId: "missing_observability_frontmatter",
|
||||
message: "Slice summary is missing `observability_surfaces` in frontmatter.",
|
||||
suggestion: "List the authoritative diagnostics and durable inspection surfaces for this slice.",
|
||||
});
|
||||
}
|
||||
|
||||
const diagnostics = getSection(content, "Authoritative diagnostics", 3);
|
||||
if (!diagnostics) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-summary",
|
||||
file,
|
||||
ruleId: "missing_authoritative_diagnostics",
|
||||
message: "Slice summary is missing `### Authoritative diagnostics` in Forward Intelligence.",
|
||||
suggestion: "Tell future agents where to look first and why that signal is trustworthy.",
|
||||
});
|
||||
} else if (sectionLooksPlaceholderOnly(diagnostics)) {
|
||||
issues.push({
|
||||
severity: "warning",
|
||||
scope: "slice-summary",
|
||||
file,
|
||||
ruleId: "authoritative_diagnostics_placeholder_only",
|
||||
message: "Slice summary includes authoritative diagnostics but it still looks like placeholder text.",
|
||||
suggestion: "Replace placeholders with the real first-stop diagnostic surface for this slice.",
|
||||
});
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export async function validatePlanBoundary(basePath: string, milestoneId: string, sliceId: string): Promise<ValidationIssue[]> {
|
||||
const issues: ValidationIssue[] = [];
|
||||
const slicePlan = resolveSliceFile(basePath, milestoneId, sliceId, "PLAN");
|
||||
if (slicePlan) {
|
||||
const content = await loadFile(slicePlan);
|
||||
if (content) issues.push(...validateSlicePlanContent(slicePlan, content));
|
||||
}
|
||||
|
||||
const tasksDir = resolveTasksDir(basePath, milestoneId, sliceId);
|
||||
const taskPlans = tasksDir ? resolveTaskFiles(tasksDir, "PLAN") : [];
|
||||
for (const file of taskPlans) {
|
||||
const taskId = file.split("-")[0];
|
||||
const taskPlan = resolveTaskFile(basePath, milestoneId, sliceId, taskId, "PLAN");
|
||||
if (!taskPlan) continue;
|
||||
const content = await loadFile(taskPlan);
|
||||
if (content) issues.push(...validateTaskPlanContent(taskPlan, content));
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export async function validateExecuteBoundary(basePath: string, milestoneId: string, sliceId: string, taskId: string): Promise<ValidationIssue[]> {
|
||||
const issues: ValidationIssue[] = [];
|
||||
const slicePlan = resolveSliceFile(basePath, milestoneId, sliceId, "PLAN");
|
||||
if (slicePlan) {
|
||||
const content = await loadFile(slicePlan);
|
||||
if (content) issues.push(...validateSlicePlanContent(slicePlan, content));
|
||||
}
|
||||
|
||||
const taskPlan = resolveTaskFile(basePath, milestoneId, sliceId, taskId, "PLAN");
|
||||
if (taskPlan) {
|
||||
const content = await loadFile(taskPlan);
|
||||
if (content) issues.push(...validateTaskPlanContent(taskPlan, content));
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export async function validateCompleteBoundary(basePath: string, milestoneId: string, sliceId: string): Promise<ValidationIssue[]> {
|
||||
const issues: ValidationIssue[] = [];
|
||||
const tasksDir = resolveTasksDir(basePath, milestoneId, sliceId);
|
||||
const taskSummaries = tasksDir ? resolveTaskFiles(tasksDir, "SUMMARY") : [];
|
||||
for (const file of taskSummaries) {
|
||||
const taskId = file.split("-")[0];
|
||||
const taskSummary = resolveTaskFile(basePath, milestoneId, sliceId, taskId, "SUMMARY");
|
||||
if (!taskSummary) continue;
|
||||
const content = await loadFile(taskSummary);
|
||||
if (content) issues.push(...validateTaskSummaryContent(taskSummary, content));
|
||||
}
|
||||
|
||||
const sliceSummary = resolveSliceFile(basePath, milestoneId, sliceId, "SUMMARY");
|
||||
if (sliceSummary) {
|
||||
const content = await loadFile(sliceSummary);
|
||||
if (content) issues.push(...validateSliceSummaryContent(sliceSummary, content));
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
export function formatValidationIssues(issues: ValidationIssue[], limit: number = 4): string {
|
||||
if (issues.length === 0) return "";
|
||||
const lines = issues.slice(0, limit).map(issue => {
|
||||
const fileName = issue.file.split("/").pop() || issue.file;
|
||||
return `- ${fileName}: ${issue.message}`;
|
||||
});
|
||||
if (issues.length > limit) lines.push(`- ...and ${issues.length - limit} more`);
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
|
@ -6,9 +6,9 @@
|
|||
*/
|
||||
|
||||
import { deriveState } from "./state.js";
|
||||
import { parseRoadmap, parsePlan, loadFile } from "./files.js";
|
||||
import { resolveMilestoneFile, resolveSliceFile } from "./paths.js";
|
||||
import { findMilestoneIds } from "./guided-flow.js";
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from "./gsd-db.js";
|
||||
import type { MilestoneRegistryEntry } from "./types.js";
|
||||
|
||||
// ─── Types ───────────────────────────────────────────────────────────────────
|
||||
|
|
@ -36,27 +36,23 @@ async function collectTouchedFiles(
|
|||
basePath: string,
|
||||
milestoneId: string,
|
||||
): Promise<string[]> {
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP");
|
||||
if (!roadmapPath) return [];
|
||||
|
||||
const roadmapContent = await loadFile(roadmapPath);
|
||||
if (!roadmapContent) return [];
|
||||
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
const files = new Set<string>();
|
||||
|
||||
for (const slice of roadmap.slices) {
|
||||
const planPath = resolveSliceFile(basePath, milestoneId, slice.id, "PLAN");
|
||||
if (!planPath) continue;
|
||||
|
||||
const planContent = await loadFile(planPath);
|
||||
if (!planContent) continue;
|
||||
|
||||
const plan = parsePlan(planContent);
|
||||
for (const f of plan.filesLikelyTouched) {
|
||||
files.add(f);
|
||||
if (isDbAvailable()) {
|
||||
// DB path: query slices and their tasks for file lists
|
||||
const slices = getMilestoneSlices(milestoneId);
|
||||
for (const slice of slices) {
|
||||
const tasks = getSliceTasks(milestoneId, slice.id);
|
||||
for (const task of tasks) {
|
||||
if (Array.isArray(task.files)) {
|
||||
for (const f of task.files) {
|
||||
files.add(f);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// When DB unavailable, return empty file set — parallel eligibility cannot be determined
|
||||
|
||||
return [...files];
|
||||
}
|
||||
|
|
|
|||
271
src/resources/extensions/gsd/parsers-legacy.ts
Normal file
271
src/resources/extensions/gsd/parsers-legacy.ts
Normal file
|
|
@ -0,0 +1,271 @@
|
|||
// GSD Extension - Legacy Parsers
|
||||
// parseRoadmap() and parsePlan() extracted from files.ts.
|
||||
// Used only by: md-importer.ts (migration), state.ts (pre-migration fallback),
|
||||
// markdown-renderer.ts (detectStaleRenders disk-vs-DB comparison),
|
||||
// commands-maintenance.ts (cold-path branch cleanup), and tests.
|
||||
//
|
||||
// NOT used in the dispatch loop or any hot-path runtime code.
|
||||
|
||||
import { extractSection, parseBullets, extractBoldField, extractAllSections, registerCacheClearCallback } from './files.js';
|
||||
import { splitFrontmatter } from '../shared/frontmatter.js';
|
||||
import { nativeParseRoadmap, nativeParsePlanFile } from './native-parser-bridge.js';
|
||||
import { debugTime, debugCount } from './debug-logger.js';
|
||||
import { CACHE_MAX } from './constants.js';
|
||||
|
||||
import type {
|
||||
Roadmap, BoundaryMapEntry,
|
||||
SlicePlan, TaskPlanEntry,
|
||||
} from './types.js';
|
||||
|
||||
// Re-export parseRoadmapSlices so callers can import all legacy parsers from one module
|
||||
import { parseRoadmapSlices } from './roadmap-slices.js';
|
||||
export { parseRoadmapSlices };
|
||||
|
||||
// ─── Parse Cache (local to this module) ───────────────────────────────────
|
||||
|
||||
/** Fast composite key: length + first/mid/last 100 chars. The middle sample
|
||||
* prevents collisions when only a few characters change in the interior of
|
||||
* a file (e.g., a checkbox [ ] → [x] that doesn't alter length or endpoints). */
|
||||
function cacheKey(content: string): string {
|
||||
const len = content.length;
|
||||
const head = content.slice(0, 100);
|
||||
const midStart = Math.max(0, Math.floor(len / 2) - 50);
|
||||
const mid = len > 200 ? content.slice(midStart, midStart + 100) : '';
|
||||
const tail = len > 100 ? content.slice(-100) : '';
|
||||
return `${len}:${head}:${mid}:${tail}`;
|
||||
}
|
||||
|
||||
const _parseCache = new Map<string, unknown>();
|
||||
|
||||
function cachedParse<T>(content: string, tag: string, parseFn: (c: string) => T): T {
|
||||
const key = tag + '|' + cacheKey(content);
|
||||
if (_parseCache.has(key)) return _parseCache.get(key) as T;
|
||||
if (_parseCache.size >= CACHE_MAX) _parseCache.clear();
|
||||
const result = parseFn(content);
|
||||
_parseCache.set(key, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Clear the legacy parser cache. Called by clearParseCache() in files.ts
 * (wired up via registerCacheClearCallback below). */
export function clearLegacyParseCache(): void {
  _parseCache.clear();
}
|
||||
|
||||
// Register with files.ts so clearParseCache() also clears our cache
|
||||
registerCacheClearCallback(clearLegacyParseCache);
|
||||
|
||||
// ─── Roadmap Parser ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse a milestone ROADMAP document into its structured form.
 *
 * Results are memoized per content via cachedParse, so repeated calls with
 * identical content return the same object — treat the result as read-only.
 */
export function parseRoadmap(content: string): Roadmap {
  return cachedParse(content, 'roadmap', _parseRoadmapImpl);
}
|
||||
|
||||
/**
 * Uncached roadmap parse. Prefers the native parser; falls back to a
 * line/regex-based parse of title, vision, success criteria, slices, and
 * the boundary map.
 */
function _parseRoadmapImpl(content: string): Roadmap {
  const stopTimer = debugTime("parse-roadmap");
  // Try native parser first for better performance
  const nativeResult = nativeParseRoadmap(content);
  if (nativeResult) {
    stopTimer({ native: true, slices: nativeResult.slices.length, boundaryEntries: nativeResult.boundaryMap.length });
    debugCount("parseRoadmapCalls");
    return nativeResult;
  }

  const lines = content.split('\n');

  // Title comes from the first H1; vision from a bold "**Vision:**"-style field.
  const h1 = lines.find(l => l.startsWith('# '));
  const title = h1 ? h1.slice(2).trim() : '';
  const vision = extractBoldField(content, 'Vision') || '';

  // Success criteria: prefer a real "## Success Criteria" section; otherwise
  // fall back to a bold "**Success Criteria:**" label, taking the text after
  // the label's line up to the next "---" rule (if any).
  const scSection = extractSection(content, 'Success Criteria', 2) ||
    (() => {
      const idx = content.indexOf('**Success Criteria:**');
      if (idx === -1) return '';
      const rest = content.slice(idx);
      const nextSection = rest.indexOf('\n---');
      const block = rest.slice(0, nextSection === -1 ? undefined : nextSection);
      const firstNewline = block.indexOf('\n');
      return firstNewline === -1 ? '' : block.slice(firstNewline + 1);
    })();
  const successCriteria = scSection ? parseBullets(scSection) : [];

  // Slices
  const slices = parseRoadmapSlices(content);

  // Boundary map: each H3 heading shaped like "A → B" describes what slice A
  // produces for slice B and what B consumes.
  const boundaryMap: BoundaryMapEntry[] = [];
  const bmSection = extractSection(content, 'Boundary Map');

  if (bmSection) {
    const h3Sections = extractAllSections(bmSection, 3);
    for (const [heading, sectionContent] of h3Sections) {
      const arrowMatch = heading.match(/^(\S+)\s*→\s*(\S+)/);
      if (!arrowMatch) continue;

      const fromSlice = arrowMatch[1];
      const toSlice = arrowMatch[2];

      let produces = '';
      let consumes = '';

      // Use indexOf-based parsing instead of [\s\S]*? regex to avoid
      // catastrophic backtracking on content with code fences (#468).
      const prodIdx = sectionContent.search(/^Produces:\s*$/m);
      if (prodIdx !== -1) {
        const afterProd = sectionContent.indexOf('\n', prodIdx);
        if (afterProd !== -1) {
          // "Produces:" body runs from the line after the label up to the
          // "Consumes" label (when it follows) or the end of the section.
          const consIdx = sectionContent.search(/^Consumes/m);
          const endIdx = consIdx !== -1 && consIdx > afterProd ? consIdx : sectionContent.length;
          produces = sectionContent.slice(afterProd + 1, endIdx).trim();
        }
      }

      // "Consumes" may be inline ("Consumes...: text") or a label followed by
      // a block on subsequent lines; try inline first, then the block form.
      const consLineMatch = sectionContent.match(/^Consumes[^:]*:\s*(.+)$/m);
      if (consLineMatch) {
        consumes = consLineMatch[1].trim();
      }
      if (!consumes) {
        const consIdx = sectionContent.search(/^Consumes[^:]*:\s*$/m);
        if (consIdx !== -1) {
          const afterCons = sectionContent.indexOf('\n', consIdx);
          if (afterCons !== -1) {
            consumes = sectionContent.slice(afterCons + 1).trim();
          }
        }
      }

      boundaryMap.push({ fromSlice, toSlice, produces, consumes });
    }
  }

  const result = { title, vision, successCriteria, slices, boundaryMap };
  stopTimer({ native: false, slices: slices.length, boundaryEntries: boundaryMap.length });
  debugCount("parseRoadmapCalls");
  return result;
}
|
||||
|
||||
// ─── Slice Plan Parser ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse a slice PLAN document into its structured form.
 *
 * Results are memoized per content via cachedParse, so repeated calls with
 * identical content return the same object — treat the result as read-only.
 */
export function parsePlan(content: string): SlicePlan {
  return cachedParse(content, 'plan', _parsePlanImpl);
}
|
||||
|
||||
/**
 * Uncached slice-plan parse. Strips frontmatter, prefers the native parser,
 * and otherwise runs a line-by-line state machine over the Tasks section
 * (checkbox-style and heading-style task entries, with Files/Verify
 * sub-bullets and free text accumulated as the task description).
 */
function _parsePlanImpl(content: string): SlicePlan {
  const stopTimer = debugTime("parse-plan");
  const [, body] = splitFrontmatter(content);
  // Try native parser first for better performance
  const nativeResult = nativeParsePlanFile(body);
  if (nativeResult) {
    stopTimer({ native: true });
    return {
      id: nativeResult.id,
      title: nativeResult.title,
      goal: nativeResult.goal,
      demo: nativeResult.demo,
      mustHaves: nativeResult.mustHaves,
      tasks: nativeResult.tasks.map(t => ({
        id: t.id,
        title: t.title,
        description: t.description,
        done: t.done,
        estimate: t.estimate,
        // Optional fields are only set when present, matching the fallback
        // parser's output shape below.
        ...(t.files.length > 0 ? { files: t.files } : {}),
        ...(t.verify ? { verify: t.verify } : {}),
      })),
      filesLikelyTouched: nativeResult.filesLikelyTouched,
    };
  }

  const lines = body.split('\n');

  // H1 shaped "# <id>: <title>" yields both fields; otherwise the whole
  // heading text becomes the title and id stays empty.
  const h1 = lines.find(l => l.startsWith('# '));
  let id = '';
  let title = '';
  if (h1) {
    const match = h1.match(/^#\s+(\w+):\s+(.+)/);
    if (match) {
      id = match[1];
      title = match[2].trim();
    } else {
      title = h1.slice(2).trim();
    }
  }

  const goal = extractBoldField(body, 'Goal') || '';
  const demo = extractBoldField(body, 'Demo') || '';

  const mhSection = extractSection(body, 'Must-Haves');
  const mustHaves = mhSection ? parseBullets(mhSection) : [];

  const tasksSection = extractSection(body, 'Tasks');
  const tasks: TaskPlanEntry[] = [];

  if (tasksSection) {
    const taskLines = tasksSection.split('\n');
    // State machine: currentTask accumulates sub-bullets and description
    // lines until the next task entry starts.
    let currentTask: TaskPlanEntry | null = null;

    for (const line of taskLines) {
      // Checkbox-style: - [x] **T01: Title** `est:...`
      const cbMatch = line.match(/^-\s+\[([ xX])\]\s+\*\*([\w.]+):\s+(.+?)\*\*\s*(.*)/);
      // Heading-style: ### T01 -- Title, ### T01: Title, ### T01 — Title
      const hdMatch = !cbMatch ? line.match(/^#{2,4}\s+([\w.]+)\s*(?:--|—|:)\s*(.+)/) : null;
      if (cbMatch || hdMatch) {
        // New task entry: flush the previous one first.
        if (currentTask) tasks.push(currentTask);

        if (cbMatch) {
          // Estimate rides in an inline `est:...` code span after the title.
          const rest = cbMatch[4] || '';
          const estMatch = rest.match(/`est:([^`]+)`/);
          const estimate = estMatch ? estMatch[1] : '';

          currentTask = {
            id: cbMatch[2],
            title: cbMatch[3],
            description: '',
            done: cbMatch[1].toLowerCase() === 'x',
            estimate,
          };
        } else {
          // Heading-style entries carry no checkbox, so done is always false.
          const rest = hdMatch![2] || '';
          const titleEstMatch = rest.match(/^(.+?)\s*`est:([^`]+)`\s*$/);
          const title = titleEstMatch ? titleEstMatch[1].trim() : rest.trim();
          const estimate = titleEstMatch ? titleEstMatch[2] : '';

          currentTask = {
            id: hdMatch![1],
            title,
            description: '',
            done: false,
            estimate,
          };
        }
      } else if (currentTask && line.match(/^\s*-\s+Files:\s*(.*)/)) {
        // "- Files: `a.ts`, `b.ts`" — comma-separated, backticks stripped.
        const filesMatch = line.match(/^\s*-\s+Files:\s*(.*)/);
        if (filesMatch) {
          currentTask.files = filesMatch[1]
            .split(',')
            .map(f => f.replace(/`/g, '').trim())
            .filter(f => f.length > 0);
        }
      } else if (currentTask && line.match(/^\s*-\s+Verify:\s*(.*)/)) {
        const verifyMatch = line.match(/^\s*-\s+Verify:\s*(.*)/);
        if (verifyMatch) {
          currentTask.verify = verifyMatch[1].trim();
        }
      } else if (currentTask && line.trim() && !line.startsWith('#')) {
        // Any other non-heading text under a task joins its description,
        // space-separated.
        const desc = line.trim();
        if (desc) {
          currentTask.description = currentTask.description
            ? currentTask.description + ' ' + desc
            : desc;
        }
      }
    }
    // Flush the trailing task.
    if (currentTask) tasks.push(currentTask);
  }

  const filesSection = extractSection(body, 'Files Likely Touched');
  const filesLikelyTouched = filesSection ? parseBullets(filesSection) : [];

  const result = { id, title, goal, demo, mustHaves, tasks, filesLikelyTouched };
  stopTimer({ tasks: tasks.length });
  debugCount("parsePlanCalls");
  return result;
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
Plan milestone {{milestoneId}} ("{{milestoneTitle}}"). Read `.gsd/DECISIONS.md` if it exists — respect existing decisions. Read `.gsd/REQUIREMENTS.md` if it exists and treat Active requirements as the capability contract. If `REQUIREMENTS.md` is missing, continue in legacy compatibility mode but explicitly note missing requirement coverage. Use the **Roadmap** output template below. Create `{{milestoneId}}-ROADMAP.md` in the milestone directory with slices, risk levels, dependencies, demo sentences, verification classes, milestone definition of done, requirement coverage, and a boundary map. Write success criteria as observable truths, not implementation tasks. If the milestone crosses multiple runtime boundaries, include an explicit final integration slice that proves the assembled system works end-to-end in a real environment. If planning produces structural decisions, append them to `.gsd/DECISIONS.md`. {{skillActivation}}
|
||||
Plan milestone {{milestoneId}} ("{{milestoneTitle}}"). Read `.gsd/DECISIONS.md` if it exists — respect existing decisions. Read `.gsd/REQUIREMENTS.md` if it exists and treat Active requirements as the capability contract. If `REQUIREMENTS.md` is missing, continue in legacy compatibility mode but explicitly note missing requirement coverage. Use the **Roadmap** output template below to shape the milestone planning payload you send to `gsd_plan_milestone`. Call `gsd_plan_milestone` to persist the milestone planning fields and render `{{milestoneId}}-ROADMAP.md` from DB state. Do **not** write `{{milestoneId}}-ROADMAP.md`, `ROADMAP.md`, or other planning artifacts manually. If planning produces structural decisions, append them to `.gsd/DECISIONS.md`. {{skillActivation}}
|
||||
|
||||
## Requirement Rules
|
||||
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ Then:
|
|||
2. {{skillActivation}}
|
||||
3. Create the roadmap: decompose into demoable vertical slices — as many as the work genuinely needs, no more. A simple feature might be 1 slice. Don't decompose for decomposition's sake.
|
||||
4. Order by risk (high-risk first)
|
||||
5. Write `{{outputPath}}` with checkboxes, risk, depends, demo sentences, proof strategy, verification classes, milestone definition of done, **requirement coverage**, and a boundary map. Write success criteria as observable truths, not implementation tasks. If the milestone crosses multiple runtime boundaries, include an explicit final integration slice that proves the assembled system works end-to-end in a real environment
|
||||
5. Call `gsd_plan_milestone` to persist the milestone planning fields and slice rows in the DB-backed planning path. Do **not** write `{{outputPath}}`, `ROADMAP.md`, or other planning artifacts manually — the planning tool owns roadmap rendering and persistence.
|
||||
6. If planning produced structural decisions (e.g. slice ordering rationale, technology choices, scope exclusions), append them to `.gsd/DECISIONS.md` (use the **Decisions** output template from the inlined context above if the file doesn't exist yet)
|
||||
|
||||
## Requirement Mapping Rules
|
||||
|
|
@ -107,6 +107,4 @@ If this milestone requires any external API keys or secrets:
|
|||
|
||||
If this milestone does not require any external API keys or secrets, skip this step entirely — do not create an empty manifest.
|
||||
|
||||
**You MUST write the file `{{outputPath}}` before finishing.**
|
||||
|
||||
When done, say: "Milestone {{milestoneId}} planned."
|
||||
|
|
|
|||
|
|
@ -63,9 +63,8 @@ Then:
|
|||
- a matching task plan file with description, steps, must-haves, verification, inputs, and expected output
|
||||
- **Inputs and Expected Output must list concrete backtick-wrapped file paths** (e.g. `` `src/types.ts` ``). These are machine-parsed to derive task dependencies — vague prose without paths breaks parallel execution. Every task must have at least one output file path.
|
||||
- Observability Impact section **only if the task touches runtime boundaries, async flows, or error paths** — omit it otherwise
|
||||
6. Write `{{outputPath}}`
|
||||
7. Write individual task plans in `{{slicePath}}/tasks/`: `T01-PLAN.md`, `T02-PLAN.md`, etc.
|
||||
8. **Self-audit the plan.** Walk through each check — if any fail, fix the plan files before moving on:
|
||||
6. **Persist planning state through DB-backed tools.** Call `gsd_plan_slice` with the full slice planning payload (goal, demo, must-haves, verification, tasks, and metadata). Then call `gsd_plan_task` for each task to persist its planning fields. These tools write to the DB and render `{{outputPath}}` and `{{slicePath}}/tasks/T##-PLAN.md` files automatically. Do **not** rely on direct `PLAN.md` writes as the source of truth; the DB-backed tools are the canonical write path for slice and task planning state.
|
||||
7. **Self-audit the plan.** Walk through each check — if any fail, fix the plan files before moving on:
|
||||
- **Completion semantics:** If every task were completed exactly as written, the slice goal/demo should actually be true.
|
||||
- **Requirement coverage:** Every must-have in the slice maps to at least one task. No must-have is orphaned. If `REQUIREMENTS.md` exists, every Active requirement this slice owns maps to at least one task.
|
||||
- **Task completeness:** Every task has steps, must-haves, verification, inputs, and expected output — none are blank or vague. Inputs and Expected Output list backtick-wrapped file paths, not prose descriptions.
|
||||
|
|
@ -73,11 +72,9 @@ Then:
|
|||
- **Key links planned:** For every pair of artifacts that must connect, there is an explicit step that wires them.
|
||||
- **Scope sanity:** Target 2–5 steps and 3–8 files per task. 10+ steps or 12+ files — must split. Each task must be completable in a single fresh context window.
|
||||
- **Feature completeness:** Every task produces real, user-facing progress — not just internal scaffolding.
|
||||
9. If planning produced structural decisions, append them to `.gsd/DECISIONS.md`
|
||||
10. {{commitInstruction}}
|
||||
8. If planning produced structural decisions, append them to `.gsd/DECISIONS.md`
|
||||
9. {{commitInstruction}}
|
||||
|
||||
The slice directory and tasks/ subdirectory already exist. Do NOT mkdir. All work stays in your working directory: `{{workingDirectory}}`.
|
||||
|
||||
**You MUST write the file `{{outputPath}}` before finishing.**
|
||||
|
||||
When done, say: "Slice {{sliceId}} planned."
|
||||
|
|
|
|||
|
|
@ -50,15 +50,14 @@ If all criteria have at least one remaining owning slice, the coverage check pas
|
|||
|
||||
**If the roadmap is still good:**
|
||||
|
||||
Write `{{assessmentPath}}` with a brief confirmation that roadmap coverage still holds after {{completedSliceId}}. If requirements exist, explicitly note whether requirement coverage remains sound.
|
||||
Write `{{assessmentPath}}` with a brief confirmation that roadmap coverage still holds after {{completedSliceId}}. If requirements exist, explicitly note whether requirement coverage remains sound. If `gsd_reassess_roadmap` is available, use it with `verdict: "roadmap-confirmed"`, an empty `sliceChanges` object, and the assessment text — the tool writes the assessment to the DB and renders ASSESSMENT.md.
|
||||
|
||||
**If changes are needed:**
|
||||
|
||||
1. Rewrite the remaining (unchecked) slices in `{{roadmapPath}}`. Keep completed slices exactly as they are (`[x]`). Update the boundary map for changed slices. Update the proof strategy if risks changed. Update requirement coverage if ownership or scope changed.
|
||||
2. Write `{{assessmentPath}}` explaining what changed and why — keep it brief and concrete.
|
||||
3. If `.gsd/REQUIREMENTS.md` exists and requirement ownership or status changed, update it.
|
||||
4. {{commitInstruction}}
|
||||
|
||||
**You MUST write the file `{{assessmentPath}}` before finishing.**
|
||||
1. **Persist changes through `gsd_reassess_roadmap`.** Pass: `milestoneId`, `completedSliceId`, `verdict` (e.g. "roadmap-adjusted"), `assessment` (text explaining the decision), and `sliceChanges` with `modified` (array of sliceId, title, risk, depends, demo), `added` (same shape), `removed` (array of slice ID strings). The tool structurally enforces preservation of completed slices, writes the assessment to the DB, re-renders ROADMAP.md, and renders ASSESSMENT.md. Skip step 2 when this tool succeeds.
|
||||
2. **Degraded fallback — direct file writes:** If `gsd_reassess_roadmap` is not available, rewrite the remaining (unchecked) slices in `{{roadmapPath}}` directly. Keep completed slices exactly as they are (`[x]`). Update the boundary map for changed slices. Update the proof strategy if risks changed. Update requirement coverage if ownership or scope changed.
|
||||
3. Write `{{assessmentPath}}` explaining what changed and why — keep it brief and concrete.
|
||||
4. If `.gsd/REQUIREMENTS.md` exists and requirement ownership or status changed, update it.
|
||||
5. {{commitInstruction}}
|
||||
|
||||
When done, say: "Roadmap reassessed."
|
||||
|
|
|
|||
|
|
@ -32,19 +32,19 @@ Consider these captures when rewriting the remaining tasks — they represent th
|
|||
|
||||
1. Read the blocker task summary carefully. Understand exactly what was discovered and why it blocks the current plan.
|
||||
2. Analyze the remaining `[ ]` tasks in the slice plan. Determine which are still valid, which need modification, and which should be replaced.
|
||||
3. Write `{{replanPath}}` documenting:
|
||||
3. **Persist replan state through `gsd_replan_slice`.** Call it with the following parameters: `milestoneId`, `sliceId`, `blockerTaskId`, `blockerDescription`, `whatChanged`, `updatedTasks` (array of task objects with taskId, title, description, estimate, files, verify, inputs, expectedOutput), `removedTaskIds` (array of task ID strings). The tool structurally enforces preservation of completed tasks, writes replan history to the DB, re-renders PLAN.md, and renders REPLAN.md. Skip steps 4–5 when this tool succeeds.
|
||||
4. **Degraded fallback — direct file writes:** If `gsd_replan_slice` is not available, fall back to writing files directly. Write `{{replanPath}}` documenting:
|
||||
- What blocker was discovered and in which task
|
||||
- What changed in the plan and why
|
||||
- Which incomplete tasks were modified, added, or removed
|
||||
- Any new risks or considerations introduced by the replan
|
||||
4. Rewrite `{{planPath}}` with the updated slice plan:
|
||||
5. If using the degraded fallback, rewrite `{{planPath}}` with the updated slice plan:
|
||||
- Keep all `[x]` tasks exactly as they were (same IDs, same descriptions, same checkmarks)
|
||||
- Update the `[ ]` tasks to address the blocker
|
||||
- Ensure the slice Goal and Demo sections are still achievable with the new tasks, or update them if the blocker fundamentally changes what the slice can deliver
|
||||
- Update the Files Likely Touched section if the replan changes which files are affected
|
||||
5. If any incomplete task had a `T0x-PLAN.md`, remove or rewrite it to match the new task description.
|
||||
6. Do not commit manually — the system auto-commits your changes after this unit completes.
|
||||
|
||||
**You MUST write `{{replanPath}}` and the updated slice plan before finishing.**
|
||||
- If a DB-backed planning tool exists for this phase, use it as the source of truth and make any rewritten `PLAN.md` reflect that persisted state rather than bypassing it
|
||||
6. If any incomplete task had a `T0x-PLAN.md`, remove or rewrite it to match the new task description.
|
||||
7. Do not commit manually — the system auto-commits your changes after this unit completes.
|
||||
|
||||
When done, say: "Slice {{sliceId}} replanned."
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@
|
|||
*/
|
||||
|
||||
import type { TaskIO, DerivedTaskNode, ReactiveExecutionState } from "./types.js";
|
||||
import { loadFile, parsePlan, parseTaskPlanIO } from "./files.js";
|
||||
import { loadFile, parseTaskPlanIO } from "./files.js";
|
||||
import { isDbAvailable, getSliceTasks } from "./gsd-db.js";
|
||||
import { resolveTasksDir, resolveTaskFiles } from "./paths.js";
|
||||
import { join } from "node:path";
|
||||
import { loadJsonFileOrNull, saveJsonFile } from "./json-persistence.js";
|
||||
|
|
@ -188,13 +189,32 @@ export async function loadSliceTaskIO(
|
|||
const planContent = slicePlanPath ? await loadFile(slicePlanPath) : null;
|
||||
if (!planContent) return [];
|
||||
|
||||
const plan = parsePlan(planContent);
|
||||
// DB primary path — get task entries
|
||||
let taskEntries: { id: string; title: string; done: boolean }[] | null = null;
|
||||
try {
|
||||
if (isDbAvailable()) {
|
||||
const tasks = getSliceTasks(mid, sid);
|
||||
if (tasks.length > 0) {
|
||||
taskEntries = tasks.map(t => ({
|
||||
id: t.id,
|
||||
title: t.title,
|
||||
done: t.status === "complete" || t.status === "done",
|
||||
}));
|
||||
}
|
||||
}
|
||||
} catch { /* fall through */ }
|
||||
|
||||
if (!taskEntries) {
|
||||
// DB unavailable — cannot determine task graph
|
||||
return [];
|
||||
}
|
||||
|
||||
const tDir = resolveTasksDir(basePath, mid, sid);
|
||||
if (!tDir) return [];
|
||||
|
||||
const results: TaskIO[] = [];
|
||||
|
||||
for (const taskEntry of plan.tasks) {
|
||||
for (const taskEntry of taskEntries) {
|
||||
const planFiles = resolveTaskFiles(tDir, "PLAN");
|
||||
const taskFileName = planFiles.find((f) =>
|
||||
f.toUpperCase().startsWith(taskEntry.id.toUpperCase() + "-"),
|
||||
|
|
|
|||
|
|
@ -14,6 +14,9 @@ import type {
|
|||
import {
|
||||
parseRoadmap,
|
||||
parsePlan,
|
||||
} from './parsers-legacy.js';
|
||||
|
||||
import {
|
||||
parseSummary,
|
||||
loadFile,
|
||||
parseRequirementCounts,
|
||||
|
|
@ -43,6 +46,8 @@ import {
|
|||
getAllMilestones,
|
||||
getMilestoneSlices,
|
||||
getSliceTasks,
|
||||
getReplanHistory,
|
||||
getSlice,
|
||||
type MilestoneRow,
|
||||
type SliceRow,
|
||||
type TaskRow,
|
||||
|
|
@ -639,8 +644,10 @@ export async function deriveStateFromDb(basePath: string): Promise<GSDState> {
|
|||
}
|
||||
|
||||
if (blockerTaskId) {
|
||||
const replanFile = resolveSliceFile(basePath, activeMilestone.id, activeSlice.id, "REPLAN");
|
||||
if (!replanFile) {
|
||||
// Loop protection: if replan_history has entries for this slice, a replan
|
||||
// was already performed — don't re-enter replanning phase.
|
||||
const replanHistory = getReplanHistory(activeMilestone.id, activeSlice.id);
|
||||
if (replanHistory.length === 0) {
|
||||
return {
|
||||
activeMilestone, activeSlice, activeTask,
|
||||
phase: 'replanning-slice',
|
||||
|
|
@ -656,10 +663,11 @@ export async function deriveStateFromDb(basePath: string): Promise<GSDState> {
|
|||
|
||||
// ── REPLAN-TRIGGER detection ─────────────────────────────────────────
|
||||
if (!blockerTaskId) {
|
||||
const replanTriggerFile = resolveSliceFile(basePath, activeMilestone.id, activeSlice.id, "REPLAN-TRIGGER");
|
||||
if (replanTriggerFile) {
|
||||
const replanFile = resolveSliceFile(basePath, activeMilestone.id, activeSlice.id, "REPLAN");
|
||||
if (!replanFile) {
|
||||
const sliceRow = getSlice(activeMilestone.id, activeSlice.id);
|
||||
if (sliceRow?.replan_triggered_at) {
|
||||
// Loop protection: if replan_history has entries, replan was already done
|
||||
const replanHistory = getReplanHistory(activeMilestone.id, activeSlice.id);
|
||||
if (replanHistory.length === 0) {
|
||||
return {
|
||||
activeMilestone, activeSlice, activeTask,
|
||||
phase: 'replanning-slice',
|
||||
|
|
|
|||
|
|
@ -366,8 +366,6 @@ function makeMockDeps(
|
|||
runPreDispatchHooks: () => ({ firedHooks: [], action: "proceed" }),
|
||||
getPriorSliceCompletionBlocker: () => null,
|
||||
getMainBranch: () => "main",
|
||||
collectObservabilityWarnings: async () => [],
|
||||
buildObservabilityRepairBlock: () => null,
|
||||
closeoutUnit: async () => {},
|
||||
verifyExpectedArtifact: () => true,
|
||||
clearUnitRuntimeRecord: () => {},
|
||||
|
|
@ -2069,7 +2067,7 @@ test("autoLoop stops when worktree has no .git for execute-task (#1833)", async
|
|||
);
|
||||
});
|
||||
|
||||
test("autoLoop stops when worktree has no project files for execute-task (#1833)", async () => {
|
||||
test("autoLoop warns but proceeds for greenfield project (no project files) (#1833)", async () => {
|
||||
_resetPendingResolve();
|
||||
|
||||
const ctx = makeMockCtx();
|
||||
|
|
@ -2078,10 +2076,17 @@ test("autoLoop stops when worktree has no project files for execute-task (#1833)
|
|||
const pi = makeMockPi();
|
||||
|
||||
const notifications: string[] = [];
|
||||
ctx.ui.notify = (msg: string) => { notifications.push(msg); };
|
||||
|
||||
const s = makeLoopSession({ basePath: "/tmp/empty-worktree" });
|
||||
|
||||
ctx.ui.notify = (msg: string) => {
|
||||
notifications.push(msg);
|
||||
// Terminate the loop after the greenfield warning fires,
|
||||
// so we don't hang waiting for dispatch resolution.
|
||||
if (msg.includes("greenfield")) {
|
||||
s.active = false;
|
||||
}
|
||||
};
|
||||
|
||||
const deps = makeMockDeps({
|
||||
deriveState: async () => {
|
||||
deps.callLog.push("deriveState");
|
||||
|
|
@ -2100,15 +2105,19 @@ test("autoLoop stops when worktree has no project files for execute-task (#1833)
|
|||
|
||||
await autoLoop(ctx, pi, s, deps);
|
||||
|
||||
assert.ok(
|
||||
deps.callLog.includes("stopAuto"),
|
||||
"should stop auto-mode when worktree has no project files",
|
||||
);
|
||||
const healthNotification = notifications.find(
|
||||
(n) => n.includes("Worktree health check failed") && n.includes("no recognized project files"),
|
||||
// Should NOT have stopped auto-mode due to health check — greenfield is allowed
|
||||
const stoppedForHealth = notifications.find(
|
||||
(n) => n.includes("Worktree health check failed"),
|
||||
);
|
||||
assert.ok(
|
||||
healthNotification,
|
||||
"should notify about missing project files in worktree",
|
||||
!stoppedForHealth,
|
||||
"should not stop with health check failure for greenfield project",
|
||||
);
|
||||
const greenfieldWarning = notifications.find(
|
||||
(n) => n.includes("no recognized project files") && n.includes("greenfield"),
|
||||
);
|
||||
assert.ok(
|
||||
greenfieldWarning,
|
||||
"should warn about greenfield project (no project files)",
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -13,9 +13,18 @@ import {
|
|||
selfHealRuntimeRecords,
|
||||
hasImplementationArtifacts,
|
||||
} from "../auto-recovery.ts";
|
||||
import { parseRoadmap, clearParseCache } from "../files.ts";
|
||||
import { parseRoadmap, parsePlan } from "../parsers-legacy.ts";
|
||||
import { parseTaskPlanFile, clearParseCache } from "../files.ts";
|
||||
import { invalidateAllCaches } from "../cache.ts";
|
||||
import { deriveState, invalidateStateCache } from "../state.ts";
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
} from "../gsd-db.ts";
|
||||
import { renderPlanFromDb } from "../markdown-renderer.ts";
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = join(tmpdir(), `gsd-test-${randomUUID()}`);
|
||||
|
|
@ -470,6 +479,143 @@ test("verifyExpectedArtifact execute-task passes for heading-style plan entry (#
|
|||
}
|
||||
});
|
||||
|
||||
test("verifyExpectedArtifact plan-slice passes for rendered slice/task plan artifacts from DB", async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, ".gsd", "gsd.db");
|
||||
openDatabase(dbPath);
|
||||
try {
|
||||
insertMilestone({ id: "M001", title: "Milestone", status: "active" });
|
||||
insertSlice({
|
||||
id: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Rendered slice",
|
||||
status: "pending",
|
||||
demo: "Rendered plan artifacts exist.",
|
||||
planning: {
|
||||
goal: "Render plans from DB rows.",
|
||||
successCriteria: "- Slice plan parses\n- Task plan files exist on disk",
|
||||
proofLevel: "integration",
|
||||
integrationClosure: "DB rows are the source of truth for PLAN artifacts.",
|
||||
observabilityImpact: "- Recovery verification fails if a task plan file is missing",
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: "T01",
|
||||
sliceId: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Render plan",
|
||||
status: "pending",
|
||||
planning: {
|
||||
description: "Create the slice plan from DB state.",
|
||||
estimate: "30m",
|
||||
files: ["src/resources/extensions/gsd/markdown-renderer.ts"],
|
||||
verify: "node --test markdown-renderer.test.ts",
|
||||
inputs: ["src/resources/extensions/gsd/gsd-db.ts"],
|
||||
expectedOutput: ["src/resources/extensions/gsd/tests/markdown-renderer.test.ts"],
|
||||
observabilityImpact: "Renderer tests cover the failure mode.",
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: "T02",
|
||||
sliceId: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Verify recovery",
|
||||
status: "pending",
|
||||
planning: {
|
||||
description: "Prove task plan files remain present for recovery.",
|
||||
estimate: "20m",
|
||||
files: ["src/resources/extensions/gsd/auto-recovery.ts"],
|
||||
verify: "node --test auto-recovery.test.ts",
|
||||
inputs: ["src/resources/extensions/gsd/auto-recovery.ts"],
|
||||
expectedOutput: ["src/resources/extensions/gsd/tests/auto-recovery.test.ts"],
|
||||
observabilityImpact: "Missing plan files surface as explicit verification failures.",
|
||||
},
|
||||
});
|
||||
|
||||
const rendered = await renderPlanFromDb(base, "M001", "S01");
|
||||
assert.ok(existsSync(rendered.planPath), "renderPlanFromDb should write the slice plan");
|
||||
assert.equal(rendered.taskPlanPaths.length, 2, "renderPlanFromDb should render one task plan per task");
|
||||
|
||||
const planContent = readFileSync(rendered.planPath, "utf-8");
|
||||
const parsedPlan = parsePlan(planContent);
|
||||
assert.equal(parsedPlan.tasks.length, 2, "rendered slice plan should parse into task entries");
|
||||
|
||||
const taskPlanContent = readFileSync(rendered.taskPlanPaths[0], "utf-8");
|
||||
const taskPlan = parseTaskPlanFile(taskPlanContent);
|
||||
assert.deepEqual(taskPlan.frontmatter.skills_used, [], "rendered task plans should use conservative empty skills_used");
|
||||
|
||||
const result = verifyExpectedArtifact("plan-slice", "M001/S01", base);
|
||||
assert.equal(result, true, "plan-slice verification should pass when rendered task plan files exist");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test("verifyExpectedArtifact plan-slice fails after deleting a rendered task plan file", async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, ".gsd", "gsd.db");
|
||||
openDatabase(dbPath);
|
||||
try {
|
||||
insertMilestone({ id: "M001", title: "Milestone", status: "active" });
|
||||
insertSlice({
|
||||
id: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Rendered slice",
|
||||
status: "pending",
|
||||
demo: "Rendered plan artifacts exist.",
|
||||
planning: {
|
||||
goal: "Render plans from DB rows.",
|
||||
successCriteria: "- Slice plan parses\n- Task plan files exist on disk",
|
||||
proofLevel: "integration",
|
||||
integrationClosure: "DB rows are the source of truth for PLAN artifacts.",
|
||||
observabilityImpact: "- Recovery verification fails if a task plan file is missing",
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: "T01",
|
||||
sliceId: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Render plan",
|
||||
status: "pending",
|
||||
planning: {
|
||||
description: "Create the slice plan from DB state.",
|
||||
estimate: "30m",
|
||||
files: ["src/resources/extensions/gsd/markdown-renderer.ts"],
|
||||
verify: "node --test markdown-renderer.test.ts",
|
||||
inputs: ["src/resources/extensions/gsd/gsd-db.ts"],
|
||||
expectedOutput: ["src/resources/extensions/gsd/tests/markdown-renderer.test.ts"],
|
||||
observabilityImpact: "Renderer tests cover the failure mode.",
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: "T02",
|
||||
sliceId: "S01",
|
||||
milestoneId: "M001",
|
||||
title: "Verify recovery",
|
||||
status: "pending",
|
||||
planning: {
|
||||
description: "Prove task plan files remain present for recovery.",
|
||||
estimate: "20m",
|
||||
files: ["src/resources/extensions/gsd/auto-recovery.ts"],
|
||||
verify: "node --test auto-recovery.test.ts",
|
||||
inputs: ["src/resources/extensions/gsd/auto-recovery.ts"],
|
||||
expectedOutput: ["src/resources/extensions/gsd/tests/auto-recovery.test.ts"],
|
||||
observabilityImpact: "Missing plan files surface as explicit verification failures.",
|
||||
},
|
||||
});
|
||||
|
||||
const rendered = await renderPlanFromDb(base, "M001", "S01");
|
||||
rmSync(rendered.taskPlanPaths[1]);
|
||||
|
||||
const result = verifyExpectedArtifact("plan-slice", "M001/S01", base);
|
||||
assert.equal(result, false, "plan-slice verification should fail when a rendered task plan file is removed");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
// ─── selfHealRuntimeRecords — worktree base path (#769) ──────────────────
|
||||
|
||||
test("selfHealRuntimeRecords clears stale dispatched records (#769)", async () => {
|
||||
|
|
|
|||
|
|
@ -158,7 +158,7 @@ async function main(): Promise<void> {
|
|||
{
|
||||
const { deriveState, isMilestoneComplete } = await import("../state.ts");
|
||||
const { invalidateAllCaches: invalidateAllCachesDynamic } = await import("../cache.ts");
|
||||
const { parseRoadmap } = await import("../files.ts");
|
||||
const { parseRoadmap } = await import("../parsers-legacy.ts");
|
||||
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
|
|
|
|||
|
|
@ -125,9 +125,9 @@ console.log('\n=== complete-slice: schema v6 migration ===');
|
|||
|
||||
const adapter = _getAdapter()!;
|
||||
|
||||
// Verify schema version is 7
|
||||
// Verify schema version is current (v10 after M001 planning migrations)
|
||||
const versionRow = adapter.prepare('SELECT MAX(version) as v FROM schema_version').get();
|
||||
assertEq(versionRow?.['v'], 7, 'schema version should be 7');
|
||||
assertEq(versionRow?.['v'], 10, 'schema version should be 10');
|
||||
|
||||
// Verify slices table has full_summary_md and full_uat_md columns
|
||||
const cols = adapter.prepare("PRAGMA table_info(slices)").all();
|
||||
|
|
|
|||
|
|
@ -109,9 +109,9 @@ console.log('\n=== complete-task: schema v5 migration ===');
|
|||
|
||||
const adapter = _getAdapter()!;
|
||||
|
||||
// Verify schema version is 7
|
||||
// Verify schema version is current (v10 after M001 planning migrations)
|
||||
const versionRow = adapter.prepare('SELECT MAX(version) as v FROM schema_version').get();
|
||||
assertEq(versionRow?.['v'], 7, 'schema version should be 7');
|
||||
assertEq(versionRow?.['v'], 10, 'schema version should be 10');
|
||||
|
||||
// Verify all 4 new tables exist
|
||||
const tables = adapter.prepare(
|
||||
|
|
|
|||
|
|
@ -194,8 +194,6 @@ function makeMockDeps(overrides?: Partial<LoopDeps>): LoopDeps & { callLog: stri
|
|||
runPreDispatchHooks: () => ({ firedHooks: [], action: "proceed" }),
|
||||
getPriorSliceCompletionBlocker: () => null,
|
||||
getMainBranch: () => "main",
|
||||
collectObservabilityWarnings: async () => [],
|
||||
buildObservabilityRepairBlock: () => null,
|
||||
closeoutUnit: async () => {},
|
||||
verifyExpectedArtifact: () => true,
|
||||
clearUnitRuntimeRecord: () => {},
|
||||
|
|
|
|||
|
|
@ -738,6 +738,14 @@ async function main(): Promise<void> {
|
|||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'First Task', status: 'pending' });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete' });
|
||||
|
||||
// Seed the replan_triggered_at column — DB path uses column instead of disk file
|
||||
const { _getAdapter } = await import('../gsd-db.ts');
|
||||
const adapter = _getAdapter();
|
||||
adapter!.prepare(
|
||||
"UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
|
||||
).run({ ":ts": new Date().toISOString(), ":mid": "M001", ":sid": "S01" });
|
||||
|
||||
|
||||
invalidateStateCache();
|
||||
const dbState = await deriveStateFromDb(base);
|
||||
|
||||
|
|
|
|||
|
|
@ -4,58 +4,92 @@ import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
|
|||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
import { getPriorSliceCompletionBlocker } from "../dispatch-guard.ts";
|
||||
import { openDatabase, closeDatabase, insertMilestone, insertSlice } from "../gsd-db.ts";
|
||||
|
||||
/** Helper: create temp dir and open an in-dir DB for dispatch-guard tests */
|
||||
function setupRepo(): string {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
mkdirSync(join(repo, ".gsd"), { recursive: true });
|
||||
openDatabase(join(repo, ".gsd", "gsd.db"));
|
||||
return repo;
|
||||
}
|
||||
|
||||
/** Helper: tear down repo (close DB then remove dir) */
|
||||
function teardownRepo(repo: string): void {
|
||||
closeDatabase();
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
test("dispatch guard blocks when prior milestone has incomplete slices", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true });
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"),
|
||||
"# M002: Previous\n\n## Slices\n- [x] **S01: Done** `risk:low` `depends:[]`\n- [ ] **S02: Pending** `risk:low` `depends:[S01]`\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"),
|
||||
"# M003: Current\n\n## Slices\n- [ ] **S01: First** `risk:low` `depends:[]`\n- [ ] **S02: Second** `risk:low` `depends:[S01]`\n");
|
||||
// Seed DB: M002 with S01 complete, S02 pending
|
||||
insertMilestone({ id: "M002", title: "Previous" });
|
||||
insertSlice({ id: "S01", milestoneId: "M002", title: "Done", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M002", title: "Pending", status: "pending", depends: ["S01"], sequence: 2 });
|
||||
|
||||
// M003 with two pending slices
|
||||
insertMilestone({ id: "M003", title: "Current" });
|
||||
insertSlice({ id: "S01", milestoneId: "M003", title: "First", status: "pending", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 });
|
||||
|
||||
// Need ROADMAP files for milestone discovery (findMilestoneIds reads disk)
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n");
|
||||
|
||||
assert.equal(
|
||||
getPriorSliceCompletionBlocker(repo, "main", "plan-slice", "M003/S01"),
|
||||
"Cannot dispatch plan-slice M003/S01: earlier slice M002/S02 is not complete.",
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard blocks later slice in same milestone when earlier incomplete", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true });
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"),
|
||||
"# M002: Previous\n\n## Slices\n- [x] **S01: Done** `risk:low` `depends:[]`\n- [x] **S02: Done** `risk:low` `depends:[S01]`\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"),
|
||||
"# M003: Current\n\n## Slices\n- [ ] **S01: First** `risk:low` `depends:[]`\n- [ ] **S02: Second** `risk:low` `depends:[S01]`\n");
|
||||
insertMilestone({ id: "M002", title: "Previous" });
|
||||
insertSlice({ id: "S01", milestoneId: "M002", title: "Done", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M002", title: "Done", status: "complete", depends: ["S01"], sequence: 2 });
|
||||
|
||||
insertMilestone({ id: "M003", title: "Current" });
|
||||
insertSlice({ id: "S01", milestoneId: "M003", title: "First", status: "pending", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n");
|
||||
|
||||
assert.equal(
|
||||
getPriorSliceCompletionBlocker(repo, "main", "execute-task", "M003/S02/T01"),
|
||||
"Cannot dispatch execute-task M003/S02/T01: dependency slice M003/S01 is not complete.",
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard allows dispatch when all earlier slices complete", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M003"), { recursive: true });
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"),
|
||||
"# M003: Current\n\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n- [ ] **S02: Second** `risk:low` `depends:[S01]`\n");
|
||||
|
||||
insertMilestone({ id: "M003", title: "Current" });
|
||||
insertSlice({ id: "S01", milestoneId: "M003", title: "First", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M003", title: "Second", status: "pending", depends: ["S01"], sequence: 2 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M003", "M003-ROADMAP.md"), "# M003\n");
|
||||
|
||||
assert.equal(getPriorSliceCompletionBlocker(repo, "main", "execute-task", "M003/S02/T01"), null);
|
||||
assert.equal(getPriorSliceCompletionBlocker(repo, "main", "plan-milestone", "M003"), null);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
|
|
@ -63,17 +97,19 @@ test("dispatch guard unblocks slice when positionally-earlier slice depends on i
|
|||
// S05 depends on S06, but S05 appears first positionally.
|
||||
// Old behavior: S06 blocked because S05 (positionally earlier) is incomplete.
|
||||
// Fixed behavior: S06 has no unmet dependencies, so it can dispatch.
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true });
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
|
||||
"# M001: Test\n\n## Slices\n" +
|
||||
"- [x] **S01: Setup** `risk:low` `depends:[]`\n" +
|
||||
"- [x] **S02: Core** `risk:low` `depends:[S01]`\n" +
|
||||
"- [x] **S03: API** `risk:low` `depends:[S02]`\n" +
|
||||
"- [x] **S04: Auth** `risk:low` `depends:[S03]`\n" +
|
||||
"- [ ] **S05: Integration** `risk:high` `depends:[S04,S06]`\n" +
|
||||
"- [ ] **S06: Data Layer** `risk:medium` `depends:[S04]`\n");
|
||||
|
||||
insertMilestone({ id: "M001", title: "Test" });
|
||||
insertSlice({ id: "S01", milestoneId: "M001", title: "Setup", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M001", title: "Core", status: "complete", depends: ["S01"], sequence: 2 });
|
||||
insertSlice({ id: "S03", milestoneId: "M001", title: "API", status: "complete", depends: ["S02"], sequence: 3 });
|
||||
insertSlice({ id: "S04", milestoneId: "M001", title: "Auth", status: "complete", depends: ["S03"], sequence: 4 });
|
||||
insertSlice({ id: "S05", milestoneId: "M001", title: "Integration", status: "pending", depends: ["S04", "S06"], sequence: 5 });
|
||||
insertSlice({ id: "S06", milestoneId: "M001", title: "Data Layer", status: "pending", depends: ["S04"], sequence: 6 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n");
|
||||
|
||||
// S06 depends only on S04 (complete) — should be unblocked
|
||||
assert.equal(
|
||||
|
|
@ -87,19 +123,21 @@ test("dispatch guard unblocks slice when positionally-earlier slice depends on i
|
|||
"Cannot dispatch plan-slice M001/S05: dependency slice M001/S06 is not complete.",
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard falls back to positional ordering when no dependencies declared", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true });
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
|
||||
"# M001: Test\n\n## Slices\n" +
|
||||
"- [x] **S01: First** `risk:low` `depends:[]`\n" +
|
||||
"- [ ] **S02: Second** `risk:low` `depends:[]`\n" +
|
||||
"- [ ] **S03: Third** `risk:low` `depends:[]`\n");
|
||||
|
||||
insertMilestone({ id: "M001", title: "Test" });
|
||||
insertSlice({ id: "S01", milestoneId: "M001", title: "First", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M001", title: "Second", status: "pending", depends: [], sequence: 2 });
|
||||
insertSlice({ id: "S03", milestoneId: "M001", title: "Third", status: "pending", depends: [], sequence: 3 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n");
|
||||
|
||||
// S03 has no dependencies — positional fallback blocks on S02
|
||||
assert.equal(
|
||||
|
|
@ -113,20 +151,22 @@ test("dispatch guard falls back to positional ordering when no dependencies decl
|
|||
null,
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard allows slice with all declared dependencies complete", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true });
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
|
||||
"# M001: Test\n\n## Slices\n" +
|
||||
"- [x] **S01: Setup** `risk:low` `depends:[]`\n" +
|
||||
"- [x] **S02: Core** `risk:low` `depends:[S01]`\n" +
|
||||
"- [ ] **S03: Feature A** `risk:low` `depends:[S01,S02]`\n" +
|
||||
"- [ ] **S04: Feature B** `risk:low` `depends:[S01]`\n");
|
||||
|
||||
insertMilestone({ id: "M001", title: "Test" });
|
||||
insertSlice({ id: "S01", milestoneId: "M001", title: "Setup", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M001", title: "Core", status: "complete", depends: ["S01"], sequence: 2 });
|
||||
insertSlice({ id: "S03", milestoneId: "M001", title: "Feature A", status: "pending", depends: ["S01", "S02"], sequence: 3 });
|
||||
insertSlice({ id: "S04", milestoneId: "M001", title: "Feature B", status: "pending", depends: ["S01"], sequence: 4 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n");
|
||||
|
||||
// S03 depends on S01 (done) and S02 (done) — unblocked
|
||||
assert.equal(
|
||||
|
|
@ -140,28 +180,31 @@ test("dispatch guard allows slice with all declared dependencies complete", () =
|
|||
null,
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard skips completed milestone with SUMMARY even if it has unchecked remediation slices (#1716)", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true });
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M002"), { recursive: true });
|
||||
|
||||
// M001 is complete (has SUMMARY) but has unchecked remediation slices
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
|
||||
"# M001: Previous\n\n## Slices\n" +
|
||||
"- [x] **S01: Core** `risk:low` `depends:[]`\n" +
|
||||
"- [x] **S02: Tests** `risk:low` `depends:[S01]`\n" +
|
||||
"- [ ] **S03-R: Remediation** `risk:low` `depends:[S02]`\n" +
|
||||
"- [ ] **S04-R: Remediation 2** `risk:low` `depends:[S02]`\n");
|
||||
// M001 is complete (has SUMMARY) but has unchecked remediation slices in DB
|
||||
insertMilestone({ id: "M001", title: "Previous" });
|
||||
insertSlice({ id: "S01", milestoneId: "M001", title: "Core", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M001", title: "Tests", status: "complete", depends: ["S01"], sequence: 2 });
|
||||
insertSlice({ id: "S03-R", milestoneId: "M001", title: "Remediation", status: "pending", depends: ["S02"], sequence: 3 });
|
||||
insertSlice({ id: "S04-R", milestoneId: "M001", title: "Remediation 2", status: "pending", depends: ["S02"], sequence: 4 });
|
||||
|
||||
insertMilestone({ id: "M002", title: "Current" });
|
||||
insertSlice({ id: "S01", milestoneId: "M002", title: "Start", status: "pending", depends: [], sequence: 1 });
|
||||
|
||||
// M001 SUMMARY on disk triggers skip
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-SUMMARY.md"),
|
||||
"---\nstatus: complete\n---\n# M001 Summary\nDone.\n");
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"),
|
||||
"# M002: Current\n\n## Slices\n- [ ] **S01: Start** `risk:low` `depends:[]`\n");
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M002", "M002-ROADMAP.md"), "# M002\n");
|
||||
|
||||
// M001 has SUMMARY — should be skipped, not block M002/S01
|
||||
assert.equal(
|
||||
|
|
@ -169,19 +212,23 @@ test("dispatch guard skips completed milestone with SUMMARY even if it has unche
|
|||
null,
|
||||
);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
||||
test("dispatch guard works without git repo", () => {
|
||||
const repo = mkdtempSync(join(tmpdir(), "gsd-dispatch-guard-nogit-"));
|
||||
const repo = setupRepo();
|
||||
try {
|
||||
mkdirSync(join(repo, ".gsd", "milestones", "M001"), { recursive: true });
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"),
|
||||
"# M001: Test\n\n## Slices\n- [x] **S01: Done** `risk:low` `depends:[]`\n- [ ] **S02: Pending** `risk:low` `depends:[S01]`\n");
|
||||
|
||||
insertMilestone({ id: "M001", title: "Test" });
|
||||
insertSlice({ id: "S01", milestoneId: "M001", title: "Done", status: "complete", depends: [], sequence: 1 });
|
||||
insertSlice({ id: "S02", milestoneId: "M001", title: "Pending", status: "pending", depends: ["S01"], sequence: 2 });
|
||||
|
||||
writeFileSync(join(repo, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n");
|
||||
|
||||
assert.equal(getPriorSliceCompletionBlocker(repo, "main", "plan-slice", "M001/S02"), null);
|
||||
} finally {
|
||||
rmSync(repo, { recursive: true, force: true });
|
||||
teardownRepo(repo);
|
||||
}
|
||||
});
|
||||
|
|
|
|||
290
src/resources/extensions/gsd/tests/flag-file-db.test.ts
Normal file
290
src/resources/extensions/gsd/tests/flag-file-db.test.ts
Normal file
|
|
@ -0,0 +1,290 @@
|
|||
/**
|
||||
* flag-file-db.test.ts — Verify that REPLAN.md and REPLAN-TRIGGER.md
|
||||
* flag-file detection in deriveStateFromDb() works from DB-only data
|
||||
* (no disk flag files needed when DB is seeded).
|
||||
*
|
||||
* Semantics:
|
||||
* - blocker_discovered on a completed task → replanning-slice (unless loop-protected)
|
||||
* - replan_triggered_at column on slice → replanning-slice (unless loop-protected)
|
||||
* - Loop protection: replan_history entries for the slice → skip replanning
|
||||
*/
|
||||
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { deriveStateFromDb, invalidateStateCache } from '../state.ts';
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
isDbAvailable,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
insertReplanHistory,
|
||||
_getAdapter,
|
||||
} from '../gsd-db.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
|
||||
// ─── Fixture Helpers ───────────────────────────────────────────────────────
|
||||
|
||||
function createFixtureBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-flag-file-db-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function writeFile(base: string, relativePath: string, content: string): void {
|
||||
const full = join(base, '.gsd', relativePath);
|
||||
mkdirSync(join(full, '..'), { recursive: true });
|
||||
writeFileSync(full, content);
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
const ROADMAP_CONTENT = `# M001: Flag-File DB Test
|
||||
|
||||
**Vision:** Test flag-file detection via DB.
|
||||
|
||||
## Slices
|
||||
|
||||
- [ ] **S01: Test Slice** \`risk:low\` \`depends:[]\`
|
||||
> After this: done.
|
||||
`;
|
||||
|
||||
const PLAN_CONTENT = `# S01: Test Slice
|
||||
|
||||
**Goal:** Test replanning detection.
|
||||
**Demo:** Tests pass.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [x] **T01: Done Task** \`est:10m\`
|
||||
Already done.
|
||||
|
||||
- [ ] **T02: Active Task** \`est:10m\`
|
||||
Current task.
|
||||
`;
|
||||
|
||||
// Minimal task plan file content — deriveStateFromDb checks the tasks dir has .md files
|
||||
const TASK_PLAN_STUB = `# T02: Active Task\n\nDo stuff.\n`;
|
||||
const TASK_SUMMARY_STUB = `---\nblocker_discovered: false\n---\n# T01 Summary\nDone.\n`;
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
async function main(): Promise<void> {
|
||||
|
||||
// ─── Test 1: blocker_discovered + no replan_history → replanning-slice ──
|
||||
console.log('\n=== flag-file-db: blocker + no history → replanning ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
// Write disk files needed by deriveStateFromDb (roadmap check, task dir check)
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/tasks/T02-PLAN.md', TASK_PLAN_STUB);
|
||||
|
||||
openDatabase(':memory:');
|
||||
assertTrue(isDbAvailable(), 'test1: DB is available');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Flag-File DB Test', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', risk: 'low', depends: [] });
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete', blockerDiscovered: true });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Active Task', status: 'pending' });
|
||||
|
||||
// No replan_history entries, no disk REPLAN.md — should trigger replanning
|
||||
invalidateStateCache();
|
||||
const state = await deriveStateFromDb(base);
|
||||
|
||||
assertEq(state.phase, 'replanning-slice', 'test1: phase is replanning-slice');
|
||||
assertTrue(state.blockers.length > 0, 'test1: has blockers');
|
||||
assertTrue(state.blockers[0]?.includes('blocker'), 'test1: blocker message mentions blocker');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Test 2: blocker_discovered + replan_history exists → loop protection → executing ──
|
||||
console.log('\n=== flag-file-db: blocker + history → loop protection ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/tasks/T02-PLAN.md', TASK_PLAN_STUB);
|
||||
|
||||
openDatabase(':memory:');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Flag-File DB Test', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', risk: 'low', depends: [] });
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete', blockerDiscovered: true });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Active Task', status: 'pending' });
|
||||
|
||||
// Insert replan_history entry — loop protection should kick in
|
||||
insertReplanHistory({
|
||||
milestoneId: 'M001',
|
||||
sliceId: 'S01',
|
||||
summary: 'Replan already completed for this slice',
|
||||
});
|
||||
|
||||
invalidateStateCache();
|
||||
const state = await deriveStateFromDb(base);
|
||||
|
||||
assertEq(state.phase, 'executing', 'test2: phase is executing (loop protection)');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Test 3: replan_triggered_at set + no replan_history → replanning-slice ──
|
||||
console.log('\n=== flag-file-db: trigger column + no history → replanning ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/tasks/T02-PLAN.md', TASK_PLAN_STUB);
|
||||
|
||||
openDatabase(':memory:');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Flag-File DB Test', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', risk: 'low', depends: [] });
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete' });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Active Task', status: 'pending' });
|
||||
|
||||
// Set replan_triggered_at directly via SQL (simulating triage-resolution.ts writing it)
|
||||
const adapter = _getAdapter();
|
||||
adapter!.prepare(
|
||||
"UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
|
||||
).run({ ":ts": new Date().toISOString(), ":mid": "M001", ":sid": "S01" });
|
||||
|
||||
invalidateStateCache();
|
||||
const state = await deriveStateFromDb(base);
|
||||
|
||||
assertEq(state.phase, 'replanning-slice', 'test3: phase is replanning-slice');
|
||||
assertTrue(state.blockers.length > 0, 'test3: has blockers');
|
||||
assertTrue(state.blockers[0]?.includes('Triage replan trigger'), 'test3: blocker message mentions triage trigger');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Test 4: replan_triggered_at set + replan_history exists → loop protection ──
|
||||
console.log('\n=== flag-file-db: trigger column + history → loop protection ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/tasks/T02-PLAN.md', TASK_PLAN_STUB);
|
||||
|
||||
openDatabase(':memory:');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Flag-File DB Test', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', risk: 'low', depends: [] });
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete' });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Active Task', status: 'pending' });
|
||||
|
||||
// Set trigger column
|
||||
const adapter = _getAdapter();
|
||||
adapter!.prepare(
|
||||
"UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
|
||||
).run({ ":ts": new Date().toISOString(), ":mid": "M001", ":sid": "S01" });
|
||||
|
||||
// Also add replan_history — loop protection should prevent replanning
|
||||
insertReplanHistory({
|
||||
milestoneId: 'M001',
|
||||
sliceId: 'S01',
|
||||
summary: 'Replan already done',
|
||||
});
|
||||
|
||||
invalidateStateCache();
|
||||
const state = await deriveStateFromDb(base);
|
||||
|
||||
assertEq(state.phase, 'executing', 'test4: phase is executing (loop protection)');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Test 5: no blocker, no trigger → phase is executing ──────────────
|
||||
console.log('\n=== flag-file-db: no blocker, no trigger → executing ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_CONTENT);
|
||||
writeFile(base, 'milestones/M001/slices/S01/tasks/T02-PLAN.md', TASK_PLAN_STUB);
|
||||
|
||||
openDatabase(':memory:');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Flag-File DB Test', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', risk: 'low', depends: [] });
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Done Task', status: 'complete' });
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Active Task', status: 'pending' });
|
||||
|
||||
// No blocker, no trigger, no replan_history — normal executing
|
||||
invalidateStateCache();
|
||||
const state = await deriveStateFromDb(base);
|
||||
|
||||
assertEq(state.phase, 'executing', 'test5: phase is executing');
|
||||
assertEq(state.activeTask?.id, 'T02', 'test5: activeTask is T02');
|
||||
assertEq(state.blockers.length, 0, 'test5: no blockers');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Diagnostic test: DB column inspection ──────────────────────────
|
||||
console.log('\n=== flag-file-db: replan_triggered_at column is queryable ===');
|
||||
{
|
||||
openDatabase(':memory:');
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Diagnostic', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test', status: 'active', risk: 'low', depends: [] });
|
||||
|
||||
// Initially null
|
||||
const adapter = _getAdapter();
|
||||
const before = adapter!.prepare(
|
||||
"SELECT id, replan_triggered_at FROM slices WHERE milestone_id = :mid",
|
||||
).get({ ":mid": "M001" }) as Record<string, unknown>;
|
||||
assertEq(before["replan_triggered_at"], null, 'diagnostic: replan_triggered_at initially null');
|
||||
|
||||
// After setting
|
||||
adapter!.prepare(
|
||||
"UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
|
||||
).run({ ":ts": "2025-01-01T00:00:00Z", ":mid": "M001", ":sid": "S01" });
|
||||
|
||||
const after = adapter!.prepare(
|
||||
"SELECT id, replan_triggered_at FROM slices WHERE milestone_id = :mid",
|
||||
).get({ ":mid": "M001" }) as Record<string, unknown>;
|
||||
assertEq(after["replan_triggered_at"], "2025-01-01T00:00:00Z", 'diagnostic: replan_triggered_at is set');
|
||||
|
||||
closeDatabase();
|
||||
}
|
||||
|
||||
report();
|
||||
}
|
||||
|
||||
main();
|
||||
|
|
@ -66,7 +66,7 @@ console.log('\n=== gsd-db: fresh DB schema init (memory) ===');
|
|||
// Check schema_version table
|
||||
const adapter = _getAdapter()!;
|
||||
const version = adapter.prepare('SELECT MAX(version) as version FROM schema_version').get();
|
||||
assertEq(version?.['version'], 7, 'schema version should be 7');
|
||||
assertEq(version?.['version'], 10, 'schema version should be 10');
|
||||
|
||||
// Check tables exist by querying them
|
||||
const dRows = adapter.prepare('SELECT count(*) as cnt FROM decisions').get();
|
||||
|
|
|
|||
|
|
@ -16,6 +16,9 @@ import {
|
|||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
getMilestone,
|
||||
getSlice,
|
||||
getTask,
|
||||
} from '../gsd-db.ts';
|
||||
import { migrateHierarchyToDb } from '../md-importer.ts';
|
||||
import { deriveStateFromDb, invalidateStateCache } from '../state.ts';
|
||||
|
|
@ -47,6 +50,12 @@ const ROADMAP_M001 = `# M001: Recovery Test
|
|||
|
||||
**Vision:** Test recovery round-trip.
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- All recovery tests pass
|
||||
- State matches after round-trip
|
||||
|
||||
|
||||
## Slices
|
||||
|
||||
- [x] **S01: Setup** \`risk:low\` \`depends:[]\`
|
||||
|
|
@ -54,6 +63,12 @@ const ROADMAP_M001 = `# M001: Recovery Test
|
|||
|
||||
- [ ] **S02: Core** \`risk:medium\` \`depends:[S01]\`
|
||||
> After this: Core done.
|
||||
|
||||
## Boundary Map
|
||||
|
||||
| From | To | Produces | Consumes |
|
||||
|------|-----|----------|----------|
|
||||
| S01 | S02 | setup artifacts | setup artifacts |
|
||||
`;
|
||||
|
||||
const PLAN_S01_COMPLETE = `---
|
||||
|
|
@ -71,9 +86,13 @@ skills_used: []
|
|||
|
||||
- [x] **T01: Init** \`est:15m\`
|
||||
Initialize things.
|
||||
- Files: \`init.ts\`, \`config.ts\`
|
||||
- Verify: \`node test-init.ts\`
|
||||
|
||||
- [x] **T02: Config** \`est:10m\`
|
||||
Configure things.
|
||||
- Files: \`settings.ts\`
|
||||
- Verify: \`node test-config.ts\`
|
||||
`;
|
||||
|
||||
const PLAN_S02_PARTIAL = `---
|
||||
|
|
@ -91,12 +110,18 @@ skills_used: []
|
|||
|
||||
- [x] **T01: Build** \`est:30m\`
|
||||
Build it.
|
||||
- Files: \`core.ts\`
|
||||
- Verify: \`node test-build.ts\`
|
||||
|
||||
- [ ] **T02: Test** \`est:20m\`
|
||||
Test it.
|
||||
- Files: \`test-core.ts\`, \`helpers.ts\`
|
||||
- Verify: \`npm test\`
|
||||
|
||||
- [ ] **T03: Polish** \`est:15m\`
|
||||
Polish it.
|
||||
- Files: \`polish.ts\`
|
||||
- Verify: \`node test-polish.ts\`
|
||||
`;
|
||||
|
||||
const SUMMARY_S01 = `---
|
||||
|
|
@ -208,6 +233,87 @@ async function main() {
|
|||
}
|
||||
}
|
||||
|
||||
// ─── Test (a2): v8 planning columns populated after recovery ───────────
|
||||
console.log('\n=== recover: v8 planning columns populated ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeFile(base, 'milestones/M001/M001-ROADMAP.md', ROADMAP_M001);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-PLAN.md', PLAN_S01_COMPLETE);
|
||||
writeFile(base, 'milestones/M001/slices/S01/S01-SUMMARY.md', SUMMARY_S01);
|
||||
writeFile(base, 'milestones/M001/slices/S02/S02-PLAN.md', PLAN_S02_PARTIAL);
|
||||
|
||||
openDatabase(':memory:');
|
||||
migrateHierarchyToDb(base);
|
||||
|
||||
// Milestone planning columns
|
||||
const milestone = getMilestone('M001');
|
||||
assertTrue(milestone !== null, 'v8: milestone exists');
|
||||
assertEq(milestone!.vision, 'Test recovery round-trip.', 'v8: milestone vision populated');
|
||||
assertTrue(milestone!.success_criteria.length >= 2, 'v8: milestone success_criteria has entries');
|
||||
assertEq(milestone!.success_criteria[0], 'All recovery tests pass', 'v8: first success criterion');
|
||||
assertTrue(milestone!.boundary_map_markdown.includes('Boundary Map'), 'v8: boundary_map_markdown populated');
|
||||
assertTrue(milestone!.boundary_map_markdown.includes('S01'), 'v8: boundary_map_markdown has S01');
|
||||
|
||||
// Tool-only fields left empty per D004
|
||||
assertEq(milestone!.key_risks.length, 0, 'v8: key_risks left empty (tool-only per D004)');
|
||||
assertEq(milestone!.requirement_coverage, '', 'v8: requirement_coverage left empty (tool-only per D004)');
|
||||
|
||||
// Slice planning columns
|
||||
const sliceS01 = getSlice('M001', 'S01');
|
||||
assertTrue(sliceS01 !== null, 'v8: slice S01 exists');
|
||||
assertEq(sliceS01!.goal, 'Setup fixtures.', 'v8: S01 goal populated');
|
||||
|
||||
const sliceS02 = getSlice('M001', 'S02');
|
||||
assertTrue(sliceS02 !== null, 'v8: slice S02 exists');
|
||||
assertEq(sliceS02!.goal, 'Build core.', 'v8: S02 goal populated');
|
||||
|
||||
// Slice tool-only fields left empty per D004
|
||||
assertEq(sliceS01!.proof_level, '', 'v8: S01 proof_level left empty (tool-only per D004)');
|
||||
|
||||
// Task planning columns — S01/T01
|
||||
const taskS01T01 = getTask('M001', 'S01', 'T01');
|
||||
assertTrue(taskS01T01 !== null, 'v8: task S01/T01 exists');
|
||||
assertTrue(taskS01T01!.files.length >= 2, 'v8: S01/T01 files populated');
|
||||
assertTrue(taskS01T01!.files.includes('init.ts'), 'v8: S01/T01 files includes init.ts');
|
||||
assertTrue(taskS01T01!.files.includes('config.ts'), 'v8: S01/T01 files includes config.ts');
|
||||
assertEq(taskS01T01!.verify, '`node test-init.ts`', 'v8: S01/T01 verify populated');
|
||||
|
||||
// Task planning columns — S02/T02
|
||||
const taskS02T02 = getTask('M001', 'S02', 'T02');
|
||||
assertTrue(taskS02T02 !== null, 'v8: task S02/T02 exists');
|
||||
assertTrue(taskS02T02!.files.length >= 2, 'v8: S02/T02 files populated');
|
||||
assertTrue(taskS02T02!.files.includes('test-core.ts'), 'v8: S02/T02 files includes test-core.ts');
|
||||
assertEq(taskS02T02!.verify, '`npm test`', 'v8: S02/T02 verify populated');
|
||||
|
||||
// Task with no Files/Verify — not applicable since all fixtures now have them,
|
||||
// but confirm a task from S02 has correct data
|
||||
const taskS02T03 = getTask('M001', 'S02', 'T03');
|
||||
assertTrue(taskS02T03 !== null, 'v8: task S02/T03 exists');
|
||||
assertTrue(taskS02T03!.files.includes('polish.ts'), 'v8: S02/T03 files includes polish.ts');
|
||||
assertEq(taskS02T03!.verify, '`node test-polish.ts`', 'v8: S02/T03 verify populated');
|
||||
|
||||
// Diagnostic: v8 planning columns queryable via SQL
|
||||
const db = _getAdapter()!;
|
||||
const milestoneRow = db.prepare("SELECT vision, success_criteria, boundary_map_markdown FROM milestones WHERE id = 'M001'").get() as any;
|
||||
assertTrue(milestoneRow.vision.length > 0, 'v8-diag: vision column queryable');
|
||||
assertTrue(milestoneRow.boundary_map_markdown.length > 0, 'v8-diag: boundary_map_markdown column queryable');
|
||||
|
||||
const sliceRow = db.prepare("SELECT goal FROM slices WHERE milestone_id = 'M001' AND id = 'S01'").get() as any;
|
||||
assertTrue(sliceRow.goal.length > 0, 'v8-diag: goal column queryable');
|
||||
|
||||
const taskRow = db.prepare("SELECT files, verify FROM tasks WHERE milestone_id = 'M001' AND slice_id = 'S01' AND id = 'T01'").get() as any;
|
||||
assertTrue(taskRow.files.length > 2, 'v8-diag: files column queryable (JSON array)');
|
||||
assertTrue(taskRow.verify.length > 0, 'v8-diag: verify column queryable');
|
||||
|
||||
closeDatabase();
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ─── Test (b): Idempotent recovery — double recover ────────────────────
|
||||
console.log('\n=== recover: idempotent — double recovery produces same state ===');
|
||||
{
|
||||
|
|
|
|||
|
|
@ -91,8 +91,6 @@ function makeMockDeps(
|
|||
runPreDispatchHooks: () => ({ firedHooks: [], action: "proceed" }),
|
||||
getPriorSliceCompletionBlocker: () => null,
|
||||
getMainBranch: () => "main",
|
||||
collectObservabilityWarnings: async () => [],
|
||||
buildObservabilityRepairBlock: () => null,
|
||||
closeoutUnit: async () => {},
|
||||
verifyExpectedArtifact: () => true,
|
||||
clearUnitRuntimeRecord: () => {},
|
||||
|
|
|
|||
|
|
@ -22,13 +22,18 @@ import {
|
|||
renderTaskSummary,
|
||||
renderSliceSummary,
|
||||
renderAllFromDb,
|
||||
renderPlanFromDb,
|
||||
renderTaskPlanFromDb,
|
||||
detectStaleRenders,
|
||||
repairStaleRenders,
|
||||
} from '../markdown-renderer.ts';
|
||||
import {
|
||||
parseRoadmap,
|
||||
parsePlan,
|
||||
} from '../parsers-legacy.ts';
|
||||
import {
|
||||
parseSummary,
|
||||
parseTaskPlanFile,
|
||||
clearParseCache,
|
||||
} from '../files.ts';
|
||||
import { clearPathCache, _clearGsdRootCache } from '../paths.ts';
|
||||
|
|
@ -433,6 +438,135 @@ console.log('\n── markdown-renderer: renderPlanCheckboxes bidirectional ─
|
|||
}
|
||||
}
|
||||
|
||||
console.log('\n── markdown-renderer: renderPlanFromDb creates parse-compatible slice plan + task plan files ──');
|
||||
|
||||
{
|
||||
const tmpDir = makeTmpDir();
|
||||
const dbPath = path.join(tmpDir, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
clearAllCaches();
|
||||
|
||||
try {
|
||||
scaffoldDirs(tmpDir, 'M001', ['S02']);
|
||||
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
insertSlice({
|
||||
id: 'S02',
|
||||
milestoneId: 'M001',
|
||||
title: 'DB-backed planning',
|
||||
status: 'pending',
|
||||
demo: 'Rendered plans exist on disk.',
|
||||
planning: {
|
||||
goal: 'Render slice plans from DB state.',
|
||||
successCriteria: '- Slice plan stays parse-compatible\n- Task plan files are regenerated',
|
||||
proofLevel: 'integration',
|
||||
integrationClosure: 'Wires DB planning rows to markdown artifacts.',
|
||||
observabilityImpact: '- Run renderer contract tests\n- Inspect stale-render diagnostics on mismatch',
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: 'T01',
|
||||
sliceId: 'S02',
|
||||
milestoneId: 'M001',
|
||||
title: 'Render slice plan',
|
||||
status: 'pending',
|
||||
planning: {
|
||||
description: 'Implement the DB-backed slice plan renderer.',
|
||||
estimate: '45m',
|
||||
files: ['src/resources/extensions/gsd/markdown-renderer.ts'],
|
||||
verify: 'node --test markdown-renderer.test.ts',
|
||||
inputs: ['src/resources/extensions/gsd/markdown-renderer.ts'],
|
||||
expectedOutput: ['src/resources/extensions/gsd/tests/markdown-renderer.test.ts'],
|
||||
observabilityImpact: 'Renderer tests cover stale render failure paths.',
|
||||
},
|
||||
});
|
||||
insertTask({
|
||||
id: 'T02',
|
||||
sliceId: 'S02',
|
||||
milestoneId: 'M001',
|
||||
title: 'Render task plan',
|
||||
status: 'pending',
|
||||
planning: {
|
||||
description: 'Emit the task plan file with conservative frontmatter.',
|
||||
estimate: '30m',
|
||||
files: ['src/resources/extensions/gsd/files.ts'],
|
||||
verify: 'node --test auto-recovery.test.ts',
|
||||
inputs: ['src/resources/extensions/gsd/files.ts'],
|
||||
expectedOutput: ['src/resources/extensions/gsd/tests/auto-recovery.test.ts'],
|
||||
observabilityImpact: 'Missing task-plan files fail recovery verification.',
|
||||
},
|
||||
});
|
||||
|
||||
const rendered = await renderPlanFromDb(tmpDir, 'M001', 'S02');
|
||||
assertTrue(fs.existsSync(rendered.planPath), 'slice plan written to disk');
|
||||
assertEq(rendered.taskPlanPaths.length, 2, 'task plan paths returned for each task');
|
||||
assertTrue(rendered.taskPlanPaths.every((p) => fs.existsSync(p)), 'all task plan files written to disk');
|
||||
|
||||
const planContent = fs.readFileSync(rendered.planPath, 'utf-8');
|
||||
clearAllCaches();
|
||||
const parsedPlan = parsePlan(planContent);
|
||||
assertEq(parsedPlan.id, 'S02', 'rendered slice plan parses with correct slice id');
|
||||
assertEq(parsedPlan.goal, 'Render slice plans from DB state.', 'rendered slice plan preserves goal');
|
||||
assertEq(parsedPlan.demo, 'Rendered plans exist on disk.', 'rendered slice plan preserves demo');
|
||||
assertEq(parsedPlan.mustHaves.length, 2, 'rendered slice plan exposes must-haves');
|
||||
assertEq(parsedPlan.tasks.length, 2, 'rendered slice plan exposes all tasks');
|
||||
assertEq(parsedPlan.tasks[0].id, 'T01', 'first task parses correctly');
|
||||
assertTrue(parsedPlan.tasks[0].description.includes('DB-backed slice plan renderer'), 'task description preserved in slice plan');
|
||||
assertEq(parsedPlan.tasks[0].files?.[0], 'src/resources/extensions/gsd/markdown-renderer.ts', 'files list preserved in slice plan');
|
||||
assertEq(parsedPlan.tasks[0].verify, 'node --test markdown-renderer.test.ts', 'verify line preserved in slice plan');
|
||||
|
||||
const planArtifact = getArtifact('milestones/M001/slices/S02/S02-PLAN.md');
|
||||
assertTrue(planArtifact !== null, 'slice plan artifact stored in DB');
|
||||
assertTrue(planArtifact!.full_content.includes('## Tasks'), 'stored plan artifact contains task section');
|
||||
|
||||
const taskPlanPath = path.join(tmpDir, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md');
|
||||
const taskPlanContent = fs.readFileSync(taskPlanPath, 'utf-8');
|
||||
const taskPlanFile = parseTaskPlanFile(taskPlanContent);
|
||||
assertEq(taskPlanFile.frontmatter.estimated_steps, 1, 'task plan frontmatter exposes estimated_steps');
|
||||
assertEq(taskPlanFile.frontmatter.estimated_files, 1, 'task plan frontmatter exposes estimated_files');
|
||||
assertEq(taskPlanFile.frontmatter.skills_used.length, 0, 'task plan frontmatter uses conservative empty skills list');
|
||||
assertMatch(taskPlanContent, /^# T01: Render slice plan/m, 'task plan renders task heading');
|
||||
assertMatch(taskPlanContent, /^## Inputs$/m, 'task plan renders Inputs section');
|
||||
assertMatch(taskPlanContent, /^## Expected Output$/m, 'task plan renders Expected Output section');
|
||||
assertMatch(taskPlanContent, /^## Verification$/m, 'task plan renders Verification section');
|
||||
|
||||
const taskArtifact = getArtifact('milestones/M001/slices/S02/tasks/T01-PLAN.md');
|
||||
assertTrue(taskArtifact !== null, 'task plan artifact stored in DB');
|
||||
assertTrue(taskArtifact!.full_content.includes('skills_used: []'), 'stored task plan artifact preserves conservative skills_used');
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanupDir(tmpDir);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n── markdown-renderer: renderTaskPlanFromDb throws for missing task ──');
|
||||
|
||||
{
|
||||
const tmpDir = makeTmpDir();
|
||||
const dbPath = path.join(tmpDir, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
clearAllCaches();
|
||||
|
||||
try {
|
||||
scaffoldDirs(tmpDir, 'M001', ['S02']);
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Slice', status: 'pending' });
|
||||
|
||||
let threw = false;
|
||||
try {
|
||||
await renderTaskPlanFromDb(tmpDir, 'M001', 'S02', 'T99');
|
||||
} catch (error) {
|
||||
threw = true;
|
||||
assertMatch(String((error as Error).message), /task M001\/S02\/T99 not found/, 'renderTaskPlanFromDb should fail clearly when task row is missing');
|
||||
}
|
||||
assertTrue(threw, 'renderTaskPlanFromDb throws when the task row is missing');
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanupDir(tmpDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Task Summary Rendering
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
|
|
|||
|
|
@ -384,7 +384,7 @@ console.log('=== md-importer: schema v1→v2 migration ===');
|
|||
openDatabase(':memory:');
|
||||
const adapter = _getAdapter();
|
||||
const version = adapter?.prepare('SELECT MAX(version) as v FROM schema_version').get();
|
||||
assertEq(version?.v, 7, 'new DB should be at schema version 7');
|
||||
assertEq(version?.v, 10, 'new DB should be at schema version 10');
|
||||
|
||||
// Artifacts table should exist
|
||||
const tableCheck = adapter?.prepare("SELECT count(*) as c FROM sqlite_master WHERE type='table' AND name='artifacts'").get();
|
||||
|
|
|
|||
|
|
@ -335,9 +335,9 @@ console.log('\n=== memory-store: schema includes memories table ===');
|
|||
const viewCount = adapter.prepare('SELECT count(*) as cnt FROM active_memories').get();
|
||||
assertEq(viewCount?.['cnt'], 0, 'active_memories view should exist');
|
||||
|
||||
// Verify schema version is 7
|
||||
// Verify schema version is 10 (after M001 planning migrations)
|
||||
const version = adapter.prepare('SELECT MAX(version) as v FROM schema_version').get();
|
||||
assertEq(version?.['v'], 7, 'schema version should be 7');
|
||||
assertEq(version?.['v'], 10, 'schema version should be 10');
|
||||
|
||||
closeDatabase();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,7 +9,8 @@ import { tmpdir } from 'node:os';
|
|||
|
||||
import { writeGSDDirectory } from '../migrate/writer.ts';
|
||||
import { generatePreview } from '../migrate/preview.ts';
|
||||
import { parseRoadmap, parsePlan, parseSummary } from '../files.ts';
|
||||
import { parseRoadmap, parsePlan } from '../parsers-legacy.ts';
|
||||
import { parseSummary } from '../files.ts';
|
||||
import { deriveState } from '../state.ts';
|
||||
import { invalidateAllCaches } from '../cache.ts';
|
||||
import type {
|
||||
|
|
|
|||
|
|
@ -18,6 +18,8 @@ import {
|
|||
import {
|
||||
parseRoadmap,
|
||||
parsePlan,
|
||||
} from '../parsers-legacy.ts';
|
||||
import {
|
||||
parseSummary,
|
||||
parseRequirementCounts,
|
||||
} from '../files.ts';
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { parseRoadmap, parsePlan, parseTaskPlanFile, parseSummary, parseContinue, parseRequirementCounts, parseSecretsManifest, formatSecretsManifest } from '../files.ts';
|
||||
import { parseRoadmap, parsePlan } from '../parsers-legacy.ts';
|
||||
import { parseTaskPlanFile, parseSummary, parseContinue, parseRequirementCounts, parseSecretsManifest, formatSecretsManifest } from '../files.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
|
|
|
|||
|
|
@ -1,133 +1,196 @@
|
|||
// Tests for inlinePriorMilestoneSummary — the cross-milestone context bridging helper.
|
||||
//
|
||||
// Scenarios covered:
|
||||
// (A) M002 with M001-SUMMARY.md present → returns string containing "Prior Milestone Summary" and summary content
|
||||
// (B) M001 (no prior milestone in dir) → returns null
|
||||
// (C) M002 with no M001-SUMMARY.md written → returns null
|
||||
// (D) M003 with M002 dir present but no M002-SUMMARY.md → returns null
|
||||
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, readFileSync, existsSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { inlinePriorMilestoneSummary } from '../files.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
import { openDatabase, closeDatabase, getMilestone, getMilestoneSlices } from '../gsd-db.ts';
|
||||
import { handlePlanMilestone } from '../tools/plan-milestone.ts';
|
||||
import { parseRoadmap } from '../parsers-legacy.ts';
|
||||
|
||||
// ─── Worktree-aware prompt loader ──────────────────────────────────────────
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
// ─── Fixture helpers ───────────────────────────────────────────────────────
|
||||
|
||||
function createFixtureBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-plan-ms-test-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true });
|
||||
function makeTmpBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-plan-milestone-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function writeMilestoneDir(base: string, mid: string): void {
|
||||
mkdirSync(join(base, '.gsd', 'milestones', mid), { recursive: true });
|
||||
}
|
||||
|
||||
function writeMilestoneSummary(base: string, mid: string, content: string): void {
|
||||
const dir = join(base, '.gsd', 'milestones', mid);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
writeFileSync(join(dir, `${mid}-SUMMARY.md`), content);
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Tests
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
async function main(): Promise<void> {
|
||||
|
||||
// ─── (A) M002 with M001-SUMMARY.md present ────────────────────────────────
|
||||
console.log('\n── (A) M002 with M001-SUMMARY.md present → string containing "Prior Milestone Summary"');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeMilestoneDir(base, 'M001');
|
||||
writeMilestoneDir(base, 'M002');
|
||||
writeMilestoneSummary(base, 'M001', '# M001 Summary\n\nKey decisions: used TypeScript throughout.\n');
|
||||
|
||||
const result = await inlinePriorMilestoneSummary('M002', base);
|
||||
|
||||
assertTrue(result !== null, '(A) result is not null when prior milestone has SUMMARY');
|
||||
assertTrue(
|
||||
typeof result === 'string' && result.includes('Prior Milestone Summary'),
|
||||
'(A) result contains "Prior Milestone Summary" label',
|
||||
);
|
||||
assertTrue(
|
||||
typeof result === 'string' && result.includes('Key decisions: used TypeScript throughout.'),
|
||||
'(A) result contains the summary file content',
|
||||
);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── (B) M001 (no prior milestone in dir) ─────────────────────────────────
|
||||
console.log('\n── (B) M001 — first milestone, no prior → null');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeMilestoneDir(base, 'M001');
|
||||
|
||||
const result = await inlinePriorMilestoneSummary('M001', base);
|
||||
|
||||
assertEq(result, null, '(B) M001 with no prior milestone → null');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── (C) M002 with no M001-SUMMARY.md ────────────────────────────────────
|
||||
console.log('\n── (C) M002 with M001 dir but no M001-SUMMARY.md → null');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeMilestoneDir(base, 'M001');
|
||||
writeMilestoneDir(base, 'M002');
|
||||
// Intentionally do NOT write M001-SUMMARY.md
|
||||
|
||||
const result = await inlinePriorMilestoneSummary('M002', base);
|
||||
|
||||
assertEq(result, null, '(C) M002 when M001 has no SUMMARY file → null');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── (D) M003 with M002 dir but no M002-SUMMARY.md ───────────────────────
|
||||
console.log('\n── (D) M003, M002 is immediately prior but has no SUMMARY → null');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
try {
|
||||
writeMilestoneDir(base, 'M001');
|
||||
writeMilestoneDir(base, 'M002');
|
||||
writeMilestoneDir(base, 'M003');
|
||||
// M001 has a summary — but M002 (the immediately prior to M003) does NOT
|
||||
writeMilestoneSummary(base, 'M001', '# M001 Summary\n\nOld context.\n');
|
||||
// Intentionally do NOT write M002-SUMMARY.md
|
||||
|
||||
const result = await inlinePriorMilestoneSummary('M003', base);
|
||||
|
||||
assertEq(result, null, '(D) M003 when M002 (immediately prior) has no SUMMARY → null');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
report();
|
||||
function validParams() {
|
||||
return {
|
||||
milestoneId: 'M001',
|
||||
title: 'DB-backed planning',
|
||||
vision: 'Make planning write through the database.',
|
||||
successCriteria: ['Planning persists', 'Roadmap renders from DB'],
|
||||
keyRisks: [
|
||||
{ risk: 'Renderer mismatch', whyItMatters: 'Rendered roadmap may stop round-tripping.' },
|
||||
],
|
||||
proofStrategy: [
|
||||
{ riskOrUnknown: 'Render correctness', retireIn: 'S01', whatWillBeProven: 'ROADMAP output matches DB state.' },
|
||||
],
|
||||
verificationContract: 'Contract verification text',
|
||||
verificationIntegration: 'Integration verification text',
|
||||
verificationOperational: 'Operational verification text',
|
||||
verificationUat: 'UAT verification text',
|
||||
definitionOfDone: ['Tests pass', 'Tool reruns cleanly'],
|
||||
requirementCoverage: 'Covers R015.',
|
||||
boundaryMapMarkdown: '| From | To | Produces | Consumes |\n|------|----|----------|----------|\n| S01 | terminal | roadmap | nothing |',
|
||||
slices: [
|
||||
{
|
||||
sliceId: 'S01',
|
||||
title: 'Tool wiring',
|
||||
risk: 'medium',
|
||||
depends: [],
|
||||
demo: 'The tool writes roadmap state.',
|
||||
goal: 'Wire the handler.',
|
||||
successCriteria: 'Handler persists state and renders markdown.',
|
||||
proofLevel: 'integration',
|
||||
integrationClosure: 'Downstream callers read rendered roadmap output.',
|
||||
observabilityImpact: 'Tests expose render and validation failures.',
|
||||
},
|
||||
{
|
||||
sliceId: 'S02',
|
||||
title: 'Prompt migration',
|
||||
risk: 'low',
|
||||
depends: ['S01'],
|
||||
demo: 'Prompts call the tool.',
|
||||
goal: 'Migrate prompts to DB-backed path.',
|
||||
successCriteria: 'Prompt contracts reference the new tool.',
|
||||
proofLevel: 'integration',
|
||||
integrationClosure: 'Prompt tests cover the new planning route.',
|
||||
observabilityImpact: 'Prompt and rogue-write failures become explicit.',
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
test('handlePlanMilestone writes milestone and slice planning state and renders roadmap', async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const result = await handlePlanMilestone(validParams(), base);
|
||||
assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);
|
||||
|
||||
const milestone = getMilestone('M001');
|
||||
assert.ok(milestone, 'milestone should exist');
|
||||
assert.equal(milestone?.vision, 'Make planning write through the database.');
|
||||
assert.deepEqual(milestone?.success_criteria, ['Planning persists', 'Roadmap renders from DB']);
|
||||
assert.equal(milestone?.verification_contract, 'Contract verification text');
|
||||
|
||||
const slices = getMilestoneSlices('M001');
|
||||
assert.equal(slices.length, 2);
|
||||
assert.equal(slices[0]?.id, 'S01');
|
||||
assert.equal(slices[0]?.goal, 'Wire the handler.');
|
||||
assert.equal(slices[1]?.depends[0], 'S01');
|
||||
|
||||
const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md');
|
||||
assert.ok(existsSync(roadmapPath), 'roadmap should be rendered to disk');
|
||||
const roadmap = readFileSync(roadmapPath, 'utf-8');
|
||||
assert.match(roadmap, /# M001: DB-backed planning/);
|
||||
assert.match(roadmap, /\*\*Vision:\*\* Make planning write through the database\./);
|
||||
assert.match(roadmap, /- \[ \] \*\*S01: Tool wiring\*\* `risk:medium` `depends:\[\]`/);
|
||||
assert.match(roadmap, /- \[ \] \*\*S02: Prompt migration\*\* `risk:low` `depends:\[S01\]`/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanMilestone rejects invalid payloads', async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const params = validParams();
|
||||
const result = await handlePlanMilestone({ ...params, slices: [] }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /validation failed: slices must be a non-empty array/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanMilestone surfaces render failures and does not clear parse-visible state on failure', async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const fallbackRoadmapPath = join(base, '.gsd', 'milestones', 'MISSING', 'MISSING-ROADMAP.md');
|
||||
mkdirSync(fallbackRoadmapPath, { recursive: true });
|
||||
|
||||
const result = await handlePlanMilestone({ ...validParams(), milestoneId: 'MISSING' }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /render failed:/);
|
||||
|
||||
const existingRoadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md');
|
||||
writeFileSync(existingRoadmapPath, '# M001: Cached roadmap\n\n**Vision:** old value\n\n## Slices\n\n', 'utf-8');
|
||||
const cachedAfter = parseRoadmap(readFileSync(existingRoadmapPath, 'utf-8'));
|
||||
assert.equal(cachedAfter.vision, 'old value');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanMilestone clears parse-visible roadmap state after successful render', async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md');
|
||||
writeFileSync(roadmapPath, '# M001: Cached roadmap\n\n**Vision:** old value\n\n## Slices\n\n', 'utf-8');
|
||||
|
||||
const cachedBefore = parseRoadmap(readFileSync(roadmapPath, 'utf-8'));
|
||||
assert.equal(cachedBefore.vision, 'old value');
|
||||
|
||||
const result = await handlePlanMilestone(validParams(), base);
|
||||
assert.ok(!('error' in result));
|
||||
|
||||
const parsedAfter = parseRoadmap(readFileSync(roadmapPath, 'utf-8'));
|
||||
assert.equal(parsedAfter.vision, 'Make planning write through the database.');
|
||||
assert.equal(parsedAfter.slices.length, 2);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanMilestone reruns idempotently and updates existing planning state', async () => {
|
||||
const base = makeTmpBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const first = await handlePlanMilestone(validParams(), base);
|
||||
assert.ok(!('error' in first));
|
||||
|
||||
const second = await handlePlanMilestone({
|
||||
...validParams(),
|
||||
vision: 'Updated vision',
|
||||
slices: [
|
||||
{
|
||||
...validParams().slices[0],
|
||||
goal: 'Updated goal',
|
||||
observabilityImpact: 'Updated observability',
|
||||
},
|
||||
validParams().slices[1],
|
||||
],
|
||||
}, base);
|
||||
assert.ok(!('error' in second));
|
||||
|
||||
const milestone = getMilestone('M001');
|
||||
assert.equal(milestone?.vision, 'Updated vision');
|
||||
|
||||
const slices = getMilestoneSlices('M001');
|
||||
assert.equal(slices.length, 2);
|
||||
assert.equal(slices[0]?.goal, 'Updated goal');
|
||||
assert.equal(slices[0]?.observability_impact, 'Updated observability');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,474 +0,0 @@
|
|||
import { validateTaskPlanContent, validateSlicePlanContent } from '../observability-validator.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — empty/missing Steps section
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: empty Steps section ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something useful.
|
||||
|
||||
## Steps
|
||||
|
||||
## Verification
|
||||
|
||||
- Run the tests and confirm output.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const stepsIssues = issues.filter(i => i.ruleId === 'empty_steps_section');
|
||||
assertTrue(stepsIssues.length >= 1, 'empty Steps section produces empty_steps_section issue');
|
||||
if (stepsIssues.length > 0) {
|
||||
assertEq(stepsIssues[0].severity, 'warning', 'empty_steps_section severity is warning');
|
||||
assertEq(stepsIssues[0].scope, 'task-plan', 'empty_steps_section scope is task-plan');
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: missing Steps section entirely ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something useful.
|
||||
|
||||
## Verification
|
||||
|
||||
- Run the tests.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const stepsIssues = issues.filter(i => i.ruleId === 'empty_steps_section');
|
||||
assertTrue(stepsIssues.length >= 1, 'missing Steps section produces empty_steps_section issue');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — placeholder-only Verification
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: placeholder-only Verification ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing.
|
||||
2. Do the other thing.
|
||||
|
||||
## Verification
|
||||
|
||||
- {{placeholder verification step}}
|
||||
- {{another placeholder}}
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const verifyIssues = issues.filter(i => i.ruleId === 'placeholder_verification');
|
||||
assertTrue(verifyIssues.length >= 1, 'placeholder-only Verification produces placeholder_verification issue');
|
||||
if (verifyIssues.length > 0) {
|
||||
assertEq(verifyIssues[0].severity, 'warning', 'placeholder_verification severity is warning');
|
||||
assertEq(verifyIssues[0].scope, 'task-plan', 'placeholder_verification scope is task-plan');
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: Verification with only template text ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing.
|
||||
|
||||
## Verification
|
||||
|
||||
{{whatWasVerifiedAndHow — commands run, tests passed, behavior confirmed}}
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const verifyIssues = issues.filter(i => i.ruleId === 'placeholder_verification');
|
||||
assertTrue(verifyIssues.length >= 1, 'template-text-only Verification produces placeholder_verification issue');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateSlicePlanContent — empty inline task entries
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateSlicePlanContent: empty inline task entries ===');
|
||||
{
|
||||
const content = `# S01: Some Slice
|
||||
|
||||
**Goal:** Build the thing.
|
||||
**Demo:** It works.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [ ] **T01: First Task** \`est:20m\`
|
||||
|
||||
- [ ] **T02: Second Task** \`est:15m\`
|
||||
|
||||
## Verification
|
||||
|
||||
- Run the tests.
|
||||
`;
|
||||
|
||||
const issues = validateSlicePlanContent('S01-PLAN.md', content);
|
||||
const emptyTaskIssues = issues.filter(i => i.ruleId === 'empty_task_entry');
|
||||
assertTrue(emptyTaskIssues.length >= 1, 'task entries with no description produce empty_task_entry issue');
|
||||
if (emptyTaskIssues.length > 0) {
|
||||
assertEq(emptyTaskIssues[0].severity, 'warning', 'empty_task_entry severity is warning');
|
||||
assertEq(emptyTaskIssues[0].scope, 'slice-plan', 'empty_task_entry scope is slice-plan');
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== validateSlicePlanContent: task entries with content are fine ===');
|
||||
{
|
||||
const content = `# S01: Some Slice
|
||||
|
||||
**Goal:** Build the thing.
|
||||
**Demo:** It works.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [ ] **T01: First Task** \`est:20m\`
|
||||
- Why: Because it matters.
|
||||
- Files: \`src/index.ts\`
|
||||
- Do: Implement the feature.
|
||||
|
||||
- [ ] **T02: Second Task** \`est:15m\`
|
||||
- Why: Also important.
|
||||
- Do: Add tests.
|
||||
|
||||
## Verification
|
||||
|
||||
- Run the tests.
|
||||
`;
|
||||
|
||||
const issues = validateSlicePlanContent('S01-PLAN.md', content);
|
||||
const emptyTaskIssues = issues.filter(i => i.ruleId === 'empty_task_entry');
|
||||
assertEq(emptyTaskIssues.length, 0, 'task entries with description content produce no empty_task_entry issues');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — scope_estimate over threshold
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: scope_estimate over threshold ===');
|
||||
{
|
||||
const content = `---
|
||||
estimated_steps: 12
|
||||
estimated_files: 15
|
||||
---
|
||||
|
||||
# T01: Big Task
|
||||
|
||||
## Steps
|
||||
|
||||
1. Step one.
|
||||
2. Step two.
|
||||
3. Step three.
|
||||
|
||||
## Verification
|
||||
|
||||
- Check it works.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const stepsOverIssues = issues.filter(i => i.ruleId === 'scope_estimate_steps_high');
|
||||
const filesOverIssues = issues.filter(i => i.ruleId === 'scope_estimate_files_high');
|
||||
assertTrue(stepsOverIssues.length >= 1, 'estimated_steps=12 (>=10) produces scope_estimate_steps_high issue');
|
||||
assertTrue(filesOverIssues.length >= 1, 'estimated_files=15 (>=12) produces scope_estimate_files_high issue');
|
||||
if (stepsOverIssues.length > 0) {
|
||||
assertEq(stepsOverIssues[0].severity, 'warning', 'scope_estimate_steps_high severity is warning');
|
||||
assertEq(stepsOverIssues[0].scope, 'task-plan', 'scope_estimate_steps_high scope is task-plan');
|
||||
}
|
||||
if (filesOverIssues.length > 0) {
|
||||
assertEq(filesOverIssues[0].severity, 'warning', 'scope_estimate_files_high severity is warning');
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — scope_estimate within limits
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: scope_estimate within limits ===');
|
||||
{
|
||||
const content = `---
|
||||
estimated_steps: 4
|
||||
estimated_files: 6
|
||||
---
|
||||
|
||||
# T01: Small Task
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing.
|
||||
|
||||
## Verification
|
||||
|
||||
- Verify it works.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const scopeIssues = issues.filter(i =>
|
||||
i.ruleId === 'scope_estimate_steps_high' || i.ruleId === 'scope_estimate_files_high'
|
||||
);
|
||||
assertEq(scopeIssues.length, 0, 'scope_estimate within limits produces no scope issues');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — missing scope_estimate (no warning)
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: missing scope_estimate ===');
|
||||
{
|
||||
const content = `# T01: No Frontmatter Task
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing.
|
||||
|
||||
## Verification
|
||||
|
||||
- Verify it works.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const scopeIssues = issues.filter(i =>
|
||||
i.ruleId === 'scope_estimate_steps_high' || i.ruleId === 'scope_estimate_files_high'
|
||||
);
|
||||
assertEq(scopeIssues.length, 0, 'missing scope_estimate produces no scope issues');
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: frontmatter without scope keys ===');
|
||||
{
|
||||
const content = `---
|
||||
id: T01
|
||||
parent: S01
|
||||
---
|
||||
|
||||
# T01: Task With Other Frontmatter
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing.
|
||||
|
||||
## Verification
|
||||
|
||||
- Verify it works.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const scopeIssues = issues.filter(i =>
|
||||
i.ruleId === 'scope_estimate_steps_high' || i.ruleId === 'scope_estimate_files_high'
|
||||
);
|
||||
assertEq(scopeIssues.length, 0, 'frontmatter without scope keys produces no scope issues');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Clean plans — no false positives
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== Clean task plan: no plan-quality issues ===');
|
||||
{
|
||||
const content = `---
|
||||
estimated_steps: 5
|
||||
estimated_files: 3
|
||||
---
|
||||
|
||||
# T01: Well-Formed Task
|
||||
|
||||
## Description
|
||||
|
||||
A real task with real content.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Read the input files.
|
||||
2. Parse the configuration.
|
||||
3. Transform the data.
|
||||
4. Write the output.
|
||||
5. Verify the results.
|
||||
|
||||
## Must-Haves
|
||||
|
||||
- [ ] Output file is valid JSON
|
||||
- [ ] All input records are processed
|
||||
|
||||
## Verification
|
||||
|
||||
- Run \`node --test tests/transform.test.ts\` — all assertions pass
|
||||
- Manually inspect output.json for correct structure
|
||||
|
||||
## Observability Impact
|
||||
|
||||
- Signals added/changed: structured error log on parse failure
|
||||
- How a future agent inspects this: check stderr for JSON parse errors
|
||||
- Failure state exposed: exit code 1 + error message on invalid input
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const planQualityIssues = issues.filter(i =>
|
||||
i.ruleId === 'empty_steps_section' ||
|
||||
i.ruleId === 'placeholder_verification' ||
|
||||
i.ruleId === 'scope_estimate_steps_high' ||
|
||||
i.ruleId === 'scope_estimate_files_high'
|
||||
);
|
||||
assertEq(planQualityIssues.length, 0, 'clean task plan produces no plan-quality issues');
|
||||
}
|
||||
|
||||
console.log('\n=== Clean slice plan: no plan-quality issues ===');
|
||||
{
|
||||
const content = `# S01: Well-Formed Slice
|
||||
|
||||
**Goal:** Build a complete feature.
|
||||
**Demo:** Run the test suite and see all green.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [ ] **T01: Create tests** \`est:20m\`
|
||||
- Why: Tests define the contract before implementation.
|
||||
- Files: \`tests/feature.test.ts\`
|
||||
- Do: Write comprehensive test assertions.
|
||||
- Verify: Test file runs without syntax errors.
|
||||
|
||||
- [ ] **T02: Implement feature** \`est:30m\`
|
||||
- Why: Core implementation.
|
||||
- Files: \`src/feature.ts\`
|
||||
- Do: Build the feature to make tests pass.
|
||||
- Verify: All tests pass.
|
||||
|
||||
## Verification
|
||||
|
||||
- \`node --test tests/feature.test.ts\` — all assertions pass
|
||||
- Check error output for diagnostic messages
|
||||
|
||||
## Observability / Diagnostics
|
||||
|
||||
- Runtime signals: structured error objects with error codes
|
||||
- Inspection surfaces: test output shows pass/fail counts
|
||||
- Failure visibility: exit code 1 on failure with descriptive message
|
||||
- Redaction constraints: none
|
||||
`;
|
||||
|
||||
const issues = validateSlicePlanContent('S01-PLAN.md', content);
|
||||
const planQualityIssues = issues.filter(i => i.ruleId === 'empty_task_entry');
|
||||
assertEq(planQualityIssues.length, 0, 'clean slice plan produces no empty_task_entry issues');
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// validateTaskPlanContent — missing output file paths
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: missing output file paths ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing
|
||||
|
||||
## Verification
|
||||
|
||||
- Check it works
|
||||
|
||||
## Expected Output
|
||||
|
||||
This task produces the main output.
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const outputIssues = issues.filter(i => i.ruleId === 'missing_output_file_paths');
|
||||
assertTrue(outputIssues.length >= 1, 'Expected Output without file paths triggers missing_output_file_paths');
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: valid output file paths ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing
|
||||
|
||||
## Verification
|
||||
|
||||
- Check it works
|
||||
|
||||
## Expected Output
|
||||
|
||||
- \`src/types.ts\` — New type definitions
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const outputIssues = issues.filter(i => i.ruleId === 'missing_output_file_paths');
|
||||
assertEq(outputIssues.length, 0, 'Expected Output with file paths does not trigger warning');
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: missing input file paths (info severity) ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing
|
||||
|
||||
## Verification
|
||||
|
||||
- Check it works
|
||||
|
||||
## Inputs
|
||||
|
||||
Prior task summary insights about the architecture.
|
||||
|
||||
## Expected Output
|
||||
|
||||
- \`src/output.ts\` — Output file
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const inputIssues = issues.filter(i => i.ruleId === 'missing_input_file_paths');
|
||||
assertTrue(inputIssues.length >= 1, 'Inputs without file paths triggers missing_input_file_paths');
|
||||
if (inputIssues.length > 0) {
|
||||
assertEq(inputIssues[0].severity, 'info', 'missing_input_file_paths is info severity (not warning)');
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n=== validateTaskPlanContent: no Expected Output section at all ===');
|
||||
{
|
||||
const content = `# T01: Some Task
|
||||
|
||||
## Description
|
||||
|
||||
Do something.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Do the thing
|
||||
|
||||
## Verification
|
||||
|
||||
- Check it works
|
||||
`;
|
||||
|
||||
const issues = validateTaskPlanContent('T01-PLAN.md', content);
|
||||
const outputIssues = issues.filter(i => i.ruleId === 'missing_output_file_paths');
|
||||
assertTrue(outputIssues.length >= 1, 'Missing Expected Output section triggers missing_output_file_paths');
|
||||
}
|
||||
|
||||
report();
|
||||
|
|
@ -54,6 +54,13 @@ test("plan-slice prompt: all variables substituted", () => {
|
|||
assert.ok(result.includes("S01"));
|
||||
});
|
||||
|
||||
test("plan-slice prompt: DB-backed tool names survive template substitution", () => {
|
||||
const result = loadPrompt("plan-slice", { ...BASE_VARS, commitInstruction: "Do not commit." });
|
||||
assert.ok(result.includes("gsd_plan_slice"), "gsd_plan_slice should appear in rendered prompt");
|
||||
assert.ok(result.includes("gsd_plan_task"), "gsd_plan_task should appear in rendered prompt");
|
||||
assert.ok(result.includes("canonical write path"), "canonical write path language should survive substitution");
|
||||
});
|
||||
|
||||
test("domain-work prompts use skillActivation placeholder", () => {
|
||||
const prompts = [
|
||||
"research-milestone",
|
||||
|
|
|
|||
179
src/resources/extensions/gsd/tests/plan-slice.test.ts
Normal file
179
src/resources/extensions/gsd/tests/plan-slice.test.ts
Normal file
|
|
@ -0,0 +1,179 @@
|
|||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, readFileSync, existsSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { openDatabase, closeDatabase, insertMilestone, insertSlice, getSlice, getSliceTasks, getTask } from '../gsd-db.ts';
|
||||
import { handlePlanSlice } from '../tools/plan-slice.ts';
|
||||
import { parsePlan } from '../parsers-legacy.ts';
|
||||
import { parseTaskPlanFile } from '../files.ts';
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-plan-slice-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
function seedParentSlice(): void {
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Planning slice', status: 'pending', demo: 'Rendered plans exist.' });
|
||||
}
|
||||
|
||||
function validParams() {
|
||||
return {
|
||||
milestoneId: 'M001',
|
||||
sliceId: 'S02',
|
||||
goal: 'Persist slice planning through the DB.',
|
||||
successCriteria: '- Slice plan renders from DB\n- Task plan files are regenerated',
|
||||
proofLevel: 'integration',
|
||||
integrationClosure: 'Planning handlers now write DB rows and render plan artifacts.',
|
||||
observabilityImpact: '- Validation failures return structured errors\n- Cache invalidation is proven by parse-visible state updates',
|
||||
tasks: [
|
||||
{
|
||||
taskId: 'T01',
|
||||
title: 'Write slice handler',
|
||||
description: 'Implement the slice planning handler.',
|
||||
estimate: '45m',
|
||||
files: ['src/resources/extensions/gsd/tools/plan-slice.ts'],
|
||||
verify: 'node --test src/resources/extensions/gsd/tests/plan-slice.test.ts',
|
||||
inputs: ['src/resources/extensions/gsd/tools/plan-milestone.ts'],
|
||||
expectedOutput: ['src/resources/extensions/gsd/tools/plan-slice.ts'],
|
||||
observabilityImpact: 'Tests exercise cache invalidation and render failure paths.',
|
||||
},
|
||||
{
|
||||
taskId: 'T02',
|
||||
title: 'Write task handler',
|
||||
description: 'Implement the task planning handler.',
|
||||
estimate: '30m',
|
||||
files: ['src/resources/extensions/gsd/tools/plan-task.ts'],
|
||||
verify: 'node --test src/resources/extensions/gsd/tests/plan-task.test.ts',
|
||||
inputs: ['src/resources/extensions/gsd/tools/plan-task.ts'],
|
||||
expectedOutput: ['src/resources/extensions/gsd/tests/plan-task.test.ts'],
|
||||
observabilityImpact: 'Task-plan renders remain parse-compatible.',
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
test('handlePlanSlice writes slice/task planning state and renders plan artifacts', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParentSlice();
|
||||
|
||||
const result = await handlePlanSlice(validParams(), base);
|
||||
assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);
|
||||
|
||||
const slice = getSlice('M001', 'S02');
|
||||
assert.ok(slice);
|
||||
assert.equal(slice?.goal, 'Persist slice planning through the DB.');
|
||||
assert.equal(slice?.proof_level, 'integration');
|
||||
|
||||
const tasks = getSliceTasks('M001', 'S02');
|
||||
assert.equal(tasks.length, 2);
|
||||
assert.equal(tasks[0]?.title, 'Write slice handler');
|
||||
assert.equal(tasks[0]?.description, 'Implement the slice planning handler.');
|
||||
assert.equal(tasks[1]?.estimate, '30m');
|
||||
|
||||
const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md');
|
||||
assert.ok(existsSync(planPath), 'slice plan should be rendered to disk');
|
||||
const parsedPlan = parsePlan(readFileSync(planPath, 'utf-8'));
|
||||
assert.equal(parsedPlan.goal, 'Persist slice planning through the DB.');
|
||||
assert.equal(parsedPlan.tasks.length, 2);
|
||||
assert.equal(parsedPlan.tasks[0]?.id, 'T01');
|
||||
|
||||
const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md');
|
||||
assert.ok(existsSync(taskPlanPath), 'task plan should be rendered to disk');
|
||||
const taskPlan = parseTaskPlanFile(readFileSync(taskPlanPath, 'utf-8'));
|
||||
assert.deepEqual(taskPlan.frontmatter.skills_used, []);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanSlice rejects invalid payloads', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParentSlice();
|
||||
const result = await handlePlanSlice({ ...validParams(), tasks: [] }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /validation failed: tasks must be a non-empty array/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanSlice rejects missing parent slice', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
const result = await handlePlanSlice(validParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /missing parent slice: M001\/S02/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanSlice surfaces render failures without changing parse-visible task-plan state for the failing task', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParentSlice();
|
||||
const failingTaskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T01-PLAN.md');
|
||||
writeFileSync(failingTaskPlanPath, '---\nestimated_steps: 1\nestimated_files: 1\nskills_used: []\n---\n\n# T01: Cached task\n', 'utf-8');
|
||||
rmSync(failingTaskPlanPath, { force: true });
|
||||
mkdirSync(failingTaskPlanPath, { recursive: true });
|
||||
|
||||
const result = await handlePlanSlice(validParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /render failed:/);
|
||||
|
||||
assert.ok(existsSync(failingTaskPlanPath), 'failing task plan path should remain the blocking directory');
|
||||
assert.equal(getTask('M001', 'S02', 'T01')?.description, 'Implement the slice planning handler.');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanSlice reruns idempotently and refreshes parse-visible state', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParentSlice();
|
||||
writeFileSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), '# S02: Cached\n\n**Goal:** old value\n\n## Tasks\n\n- [ ] **T01: Cached task**\n', 'utf-8');
|
||||
|
||||
const first = await handlePlanSlice(validParams(), base);
|
||||
assert.ok(!('error' in first));
|
||||
|
||||
const second = await handlePlanSlice({
|
||||
...validParams(),
|
||||
goal: 'Updated goal from rerun.',
|
||||
tasks: [
|
||||
{ ...validParams().tasks[0], description: 'Updated slice handler description.' },
|
||||
validParams().tasks[1],
|
||||
],
|
||||
}, base);
|
||||
assert.ok(!('error' in second));
|
||||
|
||||
const parsedAfter = parsePlan(readFileSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'S02-PLAN.md'), 'utf-8'));
|
||||
assert.equal(parsedAfter.goal, 'Updated goal from rerun.');
|
||||
const task = getTask('M001', 'S02', 'T01');
|
||||
assert.equal(task?.description, 'Updated slice handler description.');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
145
src/resources/extensions/gsd/tests/plan-task.test.ts
Normal file
145
src/resources/extensions/gsd/tests/plan-task.test.ts
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, readFileSync, existsSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import { openDatabase, closeDatabase, insertMilestone, insertSlice, insertTask, getTask } from '../gsd-db.ts';
|
||||
import { handlePlanTask } from '../tools/plan-task.ts';
|
||||
import { parseTaskPlanFile } from '../files.ts';
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-plan-task-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
function seedParent(): void {
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Planning slice', status: 'pending', demo: 'Rendered plans exist.' });
|
||||
}
|
||||
|
||||
function validParams() {
|
||||
return {
|
||||
milestoneId: 'M001',
|
||||
sliceId: 'S02',
|
||||
taskId: 'T02',
|
||||
title: 'Write task handler',
|
||||
description: 'Implement the DB-backed task planning handler.',
|
||||
estimate: '30m',
|
||||
files: ['src/resources/extensions/gsd/tools/plan-task.ts'],
|
||||
verify: 'node --test src/resources/extensions/gsd/tests/plan-task.test.ts',
|
||||
inputs: ['src/resources/extensions/gsd/tools/plan-task.ts'],
|
||||
expectedOutput: ['src/resources/extensions/gsd/tests/plan-task.test.ts'],
|
||||
observabilityImpact: 'Tests exercise validation, render failure, and cache refresh behavior.',
|
||||
};
|
||||
}
|
||||
|
||||
test('handlePlanTask writes planning state and renders task plan', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParent();
|
||||
const result = await handlePlanTask(validParams(), base);
|
||||
assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);
|
||||
|
||||
const task = getTask('M001', 'S02', 'T02');
|
||||
assert.ok(task);
|
||||
assert.equal(task?.title, 'Write task handler');
|
||||
assert.equal(task?.description, 'Implement the DB-backed task planning handler.');
|
||||
assert.equal(task?.estimate, '30m');
|
||||
|
||||
const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md');
|
||||
assert.ok(existsSync(taskPlanPath), 'task plan should be rendered to disk');
|
||||
const taskPlan = parseTaskPlanFile(readFileSync(taskPlanPath, 'utf-8'));
|
||||
assert.equal(taskPlan.frontmatter.estimated_files, 1);
|
||||
assert.deepEqual(taskPlan.frontmatter.skills_used, []);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanTask rejects invalid payloads', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParent();
|
||||
const result = await handlePlanTask({ ...validParams(), files: [''] }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /validation failed: files must contain only non-empty strings/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanTask rejects missing parent slice', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
const result = await handlePlanTask(validParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /missing parent slice: M001\/S02/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanTask surfaces render failures without changing parse-visible task plan state', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParent();
|
||||
insertTask({ id: 'T02', sliceId: 'S02', milestoneId: 'M001', title: 'Cached task', status: 'pending' });
|
||||
const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md');
|
||||
writeFileSync(taskPlanPath, '---\nestimated_steps: 1\nestimated_files: 1\nskills_used: []\n---\n\n# T02: Cached task\n', 'utf-8');
|
||||
rmSync(taskPlanPath, { force: true });
|
||||
mkdirSync(taskPlanPath, { recursive: true });
|
||||
|
||||
const result = await handlePlanTask(validParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /render failed:/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handlePlanTask reruns idempotently and refreshes parse-visible state', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedParent();
|
||||
const taskPlanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02', 'tasks', 'T02-PLAN.md');
|
||||
writeFileSync(taskPlanPath, '---\nestimated_steps: 1\nestimated_files: 1\nskills_used: []\n---\n\n# T02: Cached task\n', 'utf-8');
|
||||
|
||||
const first = await handlePlanTask(validParams(), base);
|
||||
assert.ok(!('error' in first));
|
||||
|
||||
const second = await handlePlanTask({
|
||||
...validParams(),
|
||||
description: 'Updated task handler description.',
|
||||
estimate: '1h',
|
||||
}, base);
|
||||
assert.ok(!('error' in second));
|
||||
|
||||
const task = getTask('M001', 'S02', 'T02');
|
||||
assert.equal(task?.description, 'Updated task handler description.');
|
||||
assert.equal(task?.estimate, '1h');
|
||||
|
||||
const parsed = parseTaskPlanFile(readFileSync(taskPlanPath, 'utf-8'));
|
||||
assert.equal(parsed.frontmatter.estimated_steps, 1);
|
||||
assert.match(readFileSync(taskPlanPath, 'utf-8'), /Updated task handler description\./);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
305
src/resources/extensions/gsd/tests/planning-crossval.test.ts
Normal file
305
src/resources/extensions/gsd/tests/planning-crossval.test.ts
Normal file
|
|
@ -0,0 +1,305 @@
|
|||
// planning-crossval.test.ts — Cross-validation: DB→render→parse round-trip parity
|
||||
// Proves R014: DB state matches rendered-then-parsed state during the transition window.
|
||||
// Each test seeds planning data into DB via insert functions, renders markdown via
|
||||
// renderers, parses back via existing parsers, and asserts field-by-field parity.
|
||||
|
||||
import { mkdtempSync, mkdirSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
getMilestoneSlices,
|
||||
getSliceTasks,
|
||||
} from '../gsd-db.ts';
|
||||
import {
|
||||
renderRoadmapFromDb,
|
||||
renderPlanFromDb,
|
||||
} from '../markdown-renderer.ts';
|
||||
import { parseRoadmapSlices } from '../roadmap-slices.ts';
|
||||
import { parsePlan } from '../parsers-legacy.ts';
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
||||
const { assertEq, assertTrue, report } = createTestContext();
|
||||
|
||||
// ─── Fixture Helpers ───────────────────────────────────────────────────────
|
||||
|
||||
function createFixtureBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-planning-crossval-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
/** Scaffold the minimal directory structure the renderers need on disk. */
|
||||
function scaffoldDirs(base: string, milestoneId: string, sliceIds: string[]): void {
|
||||
mkdirSync(join(base, '.gsd', 'milestones', milestoneId), { recursive: true });
|
||||
for (const sid of sliceIds) {
|
||||
mkdirSync(join(base, '.gsd', 'milestones', milestoneId, 'slices', sid, 'tasks'), { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Test 1: ROADMAP DB→render→parse round-trip parity
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== planning-crossval Test 1: ROADMAP round-trip parity ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
try {
|
||||
scaffoldDirs(base, 'M001', ['S01', 'S02', 'S03', 'S04']);
|
||||
|
||||
// Insert milestone
|
||||
insertMilestone({
|
||||
id: 'M001',
|
||||
title: 'Crossval Test Project',
|
||||
status: 'active',
|
||||
planning: { vision: 'Test round-trip parity.' },
|
||||
});
|
||||
|
||||
// Insert 4 slices with varied status, depends, risk, and demo
|
||||
const dbSlices = [
|
||||
{ id: 'S01', title: 'Foundation', status: 'complete', risk: 'low', depends: [] as string[], demo: 'Foundation laid.', sequence: 1 },
|
||||
{ id: 'S02', title: 'Core Logic', status: 'complete', risk: 'medium', depends: ['S01'], demo: 'Core working.', sequence: 2 },
|
||||
{ id: 'S03', title: 'Integration', status: 'pending', risk: 'high', depends: ['S01', 'S02'], demo: 'Integrated.', sequence: 3 },
|
||||
{ id: 'S04', title: 'Polish', status: 'pending', risk: 'low', depends: ['S03'], demo: 'Polished.', sequence: 4 },
|
||||
];
|
||||
|
||||
for (const s of dbSlices) {
|
||||
insertSlice({
|
||||
id: s.id,
|
||||
milestoneId: 'M001',
|
||||
title: s.title,
|
||||
status: s.status,
|
||||
risk: s.risk,
|
||||
depends: s.depends,
|
||||
demo: s.demo,
|
||||
sequence: s.sequence,
|
||||
});
|
||||
}
|
||||
|
||||
// Render ROADMAP.md from DB
|
||||
const rendered = await renderRoadmapFromDb(base, 'M001');
|
||||
const content = readFileSync(rendered.roadmapPath, 'utf-8');
|
||||
|
||||
// Parse back
|
||||
const parsedSlices = parseRoadmapSlices(content);
|
||||
|
||||
// Assert slice count
|
||||
assertEq(parsedSlices.length, dbSlices.length, 'T1: slice count matches');
|
||||
|
||||
// Assert field parity for each slice
|
||||
for (let i = 0; i < dbSlices.length; i++) {
|
||||
const db = dbSlices[i];
|
||||
const parsed = parsedSlices[i];
|
||||
assertEq(parsed.id, db.id, `T1: slice[${i}].id`);
|
||||
assertEq(parsed.title, db.title, `T1: slice[${i}].title`);
|
||||
assertEq(parsed.done, db.status === 'complete', `T1: slice[${i}].done matches status`);
|
||||
assertEq(parsed.risk, db.risk, `T1: slice[${i}].risk`);
|
||||
assertEq(JSON.stringify(parsed.depends), JSON.stringify(db.depends), `T1: slice[${i}].depends`);
|
||||
}
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Test 2: PLAN DB→render→parse round-trip parity
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== planning-crossval Test 2: PLAN round-trip parity ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
try {
|
||||
scaffoldDirs(base, 'M001', ['S01']);
|
||||
|
||||
insertMilestone({
|
||||
id: 'M001',
|
||||
title: 'Plan Crossval',
|
||||
status: 'active',
|
||||
planning: { vision: 'Test plan round-trip.' },
|
||||
});
|
||||
|
||||
insertSlice({
|
||||
id: 'S01',
|
||||
milestoneId: 'M001',
|
||||
title: 'Core Slice',
|
||||
status: 'pending',
|
||||
demo: 'Core working.',
|
||||
planning: {
|
||||
goal: 'Build the core feature.',
|
||||
successCriteria: '- Tests pass\n- Coverage above 80%',
|
||||
},
|
||||
});
|
||||
|
||||
// Insert 3 tasks with planning fields populated
|
||||
const dbTasks = [
|
||||
{
|
||||
id: 'T01',
|
||||
title: 'Setup types',
|
||||
status: 'complete',
|
||||
description: 'Define TypeScript interfaces for all domain types.',
|
||||
files: ['src/types.ts', 'src/interfaces.ts'],
|
||||
verify: 'node --test types.test.ts',
|
||||
estimate: '30m',
|
||||
sequence: 1,
|
||||
},
|
||||
{
|
||||
id: 'T02',
|
||||
title: 'Implement logic',
|
||||
status: 'pending',
|
||||
description: 'Build the core business logic module.',
|
||||
files: ['src/logic.ts'],
|
||||
verify: 'node --test logic.test.ts',
|
||||
estimate: '1h',
|
||||
sequence: 2,
|
||||
},
|
||||
{
|
||||
id: 'T03',
|
||||
title: 'Write tests',
|
||||
status: 'pending',
|
||||
description: 'Create comprehensive test coverage.',
|
||||
files: ['src/tests/core.test.ts', 'src/tests/edge.test.ts'],
|
||||
verify: 'npm test',
|
||||
estimate: '45m',
|
||||
sequence: 3,
|
||||
},
|
||||
];
|
||||
|
||||
for (const t of dbTasks) {
|
||||
insertTask({
|
||||
id: t.id,
|
||||
sliceId: 'S01',
|
||||
milestoneId: 'M001',
|
||||
title: t.title,
|
||||
status: t.status,
|
||||
sequence: t.sequence,
|
||||
planning: {
|
||||
description: t.description,
|
||||
files: t.files,
|
||||
verify: t.verify,
|
||||
estimate: t.estimate,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Render PLAN from DB
|
||||
const rendered = await renderPlanFromDb(base, 'M001', 'S01');
|
||||
const content = readFileSync(rendered.planPath, 'utf-8');
|
||||
|
||||
// Parse back
|
||||
const parsedPlan = parsePlan(content);
|
||||
|
||||
// Assert task count
|
||||
assertEq(parsedPlan.tasks.length, 3, 'T2: task count matches');
|
||||
|
||||
// Assert field parity for each task
|
||||
for (let i = 0; i < dbTasks.length; i++) {
|
||||
const db = dbTasks[i];
|
||||
const parsed = parsedPlan.tasks[i];
|
||||
assertEq(parsed.id, db.id, `T2: task[${i}].id`);
|
||||
assertEq(parsed.title, db.title, `T2: task[${i}].title`);
|
||||
assertEq(parsed.verify, db.verify, `T2: task[${i}].verify`);
|
||||
assertEq(parsed.done, db.status === 'complete', `T2: task[${i}].done matches status`);
|
||||
}
|
||||
|
||||
// Assert filesLikelyTouched contains all files from all tasks
|
||||
const allFiles = dbTasks.flatMap(t => t.files);
|
||||
for (const file of allFiles) {
|
||||
assertTrue(
|
||||
parsedPlan.filesLikelyTouched.includes(file),
|
||||
`T2: filesLikelyTouched contains ${file}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Assert task order matches sequence ordering (T01, T02, T03)
|
||||
assertEq(parsedPlan.tasks[0].id, 'T01', 'T2: first task is T01 (sequence 1)');
|
||||
assertEq(parsedPlan.tasks[1].id, 'T02', 'T2: second task is T02 (sequence 2)');
|
||||
assertEq(parsedPlan.tasks[2].id, 'T03', 'T2: third task is T03 (sequence 3)');
|
||||
|
||||
// Assert task files preserved
|
||||
assertEq(
|
||||
JSON.stringify(parsedPlan.tasks[0].files),
|
||||
JSON.stringify(dbTasks[0].files),
|
||||
'T2: task[0].files match DB',
|
||||
);
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Test 3: Sequence ordering parity — non-sequential insertion order
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
console.log('\n=== planning-crossval Test 3: Sequence ordering parity ===');
|
||||
{
|
||||
const base = createFixtureBase();
|
||||
const dbPath = join(base, '.gsd', 'gsd.db');
|
||||
openDatabase(dbPath);
|
||||
try {
|
||||
scaffoldDirs(base, 'M001', ['S01', 'S02', 'S03', 'S04']);
|
||||
|
||||
insertMilestone({
|
||||
id: 'M001',
|
||||
title: 'Sequence Test',
|
||||
status: 'active',
|
||||
planning: { vision: 'Test sequence ordering.' },
|
||||
});
|
||||
|
||||
// Insert slices in scrambled order with explicit sequence values
|
||||
// Insertion order: S03(seq=3), S01(seq=1), S04(seq=4), S02(seq=2)
|
||||
// Expected render/parse order: S01, S02, S03, S04 (by sequence)
|
||||
insertSlice({ id: 'S03', milestoneId: 'M001', title: 'Third', status: 'pending', risk: 'low', demo: 'Third done.', sequence: 3 });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'First', status: 'complete', risk: 'low', demo: 'First done.', sequence: 1 });
|
||||
insertSlice({ id: 'S04', milestoneId: 'M001', title: 'Fourth', status: 'pending', risk: 'high', demo: 'Fourth done.', sequence: 4 });
|
||||
insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Second', status: 'complete', risk: 'medium', demo: 'Second done.', sequence: 2 });
|
||||
|
||||
// Verify DB query returns sequence-ordered results
|
||||
const dbSlices = getMilestoneSlices('M001');
|
||||
assertEq(dbSlices.length, 4, 'T3: DB returns 4 slices');
|
||||
assertEq(dbSlices[0].id, 'S01', 'T3: DB first slice is S01 (sequence 1)');
|
||||
assertEq(dbSlices[1].id, 'S02', 'T3: DB second slice is S02 (sequence 2)');
|
||||
assertEq(dbSlices[2].id, 'S03', 'T3: DB third slice is S03 (sequence 3)');
|
||||
assertEq(dbSlices[3].id, 'S04', 'T3: DB fourth slice is S04 (sequence 4)');
|
||||
|
||||
// Render ROADMAP from DB — should produce slices in sequence order
|
||||
const rendered = await renderRoadmapFromDb(base, 'M001');
|
||||
const content = readFileSync(rendered.roadmapPath, 'utf-8');
|
||||
|
||||
// Parse back
|
||||
const parsedSlices = parseRoadmapSlices(content);
|
||||
|
||||
// Assert parsed order matches sequence order, NOT insertion order
|
||||
assertEq(parsedSlices.length, 4, 'T3: parsed 4 slices');
|
||||
assertEq(parsedSlices[0].id, 'S01', 'T3: parsed first slice is S01 (sequence 1)');
|
||||
assertEq(parsedSlices[1].id, 'S02', 'T3: parsed second slice is S02 (sequence 2)');
|
||||
assertEq(parsedSlices[2].id, 'S03', 'T3: parsed third slice is S03 (sequence 3)');
|
||||
assertEq(parsedSlices[3].id, 'S04', 'T3: parsed fourth slice is S04 (sequence 4)');
|
||||
|
||||
// Assert full parity through DB→render→parse round-trip
|
||||
for (let i = 0; i < 4; i++) {
|
||||
assertEq(parsedSlices[i].id, dbSlices[i].id, `T3: round-trip slice[${i}].id`);
|
||||
assertEq(parsedSlices[i].done, dbSlices[i].status === 'complete', `T3: round-trip slice[${i}].done`);
|
||||
assertEq(parsedSlices[i].title, dbSlices[i].title, `T3: round-trip slice[${i}].title`);
|
||||
}
|
||||
} finally {
|
||||
closeDatabase();
|
||||
cleanup(base);
|
||||
}
|
||||
}
|
||||
|
||||
report();
|
||||
|
|
@ -130,6 +130,66 @@ test("complete-slice prompt still contains template variables for context", () =
|
|||
assert.match(prompt, /\{\{roadmapPath\}\}/);
|
||||
});
|
||||
|
||||
test("plan-milestone prompt references DB-backed planning tool and explicitly forbids manual roadmap writes", () => {
|
||||
const prompt = readPrompt("plan-milestone");
|
||||
assert.match(prompt, /gsd_plan_milestone/);
|
||||
assert.match(prompt, /Do \*\*not\*\* write `?\{\{outputPath\}\}`?, `?ROADMAP\.md`?, or other planning artifacts manually/i);
|
||||
});
|
||||
|
||||
test("guided-plan-milestone prompt references DB-backed planning tool and explicitly forbids manual roadmap writes", () => {
|
||||
const prompt = readPrompt("guided-plan-milestone");
|
||||
assert.match(prompt, /gsd_plan_milestone/);
|
||||
assert.match(prompt, /Do \*\*not\*\* write `?\{\{milestoneId\}\}-ROADMAP\.md`?, `?ROADMAP\.md`?, or other planning artifacts manually/i);
|
||||
});
|
||||
|
||||
test("plan-slice prompt no longer frames direct PLAN writes as the source of truth", () => {
|
||||
const prompt = readPrompt("plan-slice");
|
||||
assert.match(prompt, /Do \*\*not\*\* rely on direct `PLAN\.md` writes as the source of truth/i);
|
||||
});
|
||||
|
||||
test("plan-slice prompt explicitly names gsd_plan_slice and gsd_plan_task as DB-backed planning tools", () => {
|
||||
const prompt = readPrompt("plan-slice");
|
||||
assert.match(prompt, /gsd_plan_slice/);
|
||||
assert.match(prompt, /gsd_plan_task/);
|
||||
// The prompt should describe these as the canonical write path
|
||||
assert.match(prompt, /DB-backed tools are the canonical write path/i);
|
||||
});
|
||||
|
||||
test("plan-slice prompt does not instruct direct file writes as a primary step", () => {
|
||||
const prompt = readPrompt("plan-slice");
|
||||
// Should not instruct to "Write {{outputPath}}" as a primary step — tools handle rendering
|
||||
assert.doesNotMatch(prompt, /^\d+\.\s+Write `?\{\{outputPath\}\}`?\s*$/m);
|
||||
});
|
||||
|
||||
test("plan-slice prompt instructs calling gsd_plan_task for each task", () => {
|
||||
const prompt = readPrompt("plan-slice");
|
||||
assert.match(prompt, /call `gsd_plan_task` for each task/i);
|
||||
});
|
||||
|
||||
test("replan-slice prompt requires DB-backed planning state when available", () => {
|
||||
const prompt = readPrompt("replan-slice");
|
||||
assert.match(prompt, /DB-backed planning tool exists for this phase, use it as the source of truth/i);
|
||||
});
|
||||
|
||||
test("reassess-roadmap prompt references gsd_reassess_roadmap tool", () => {
|
||||
const prompt = readPrompt("reassess-roadmap");
|
||||
assert.match(prompt, /gsd_reassess_roadmap/);
|
||||
});
|
||||
|
||||
// ─── Prompt migration: replan-slice → gsd_replan_slice ────────────────
|
||||
|
||||
test("replan-slice prompt names gsd_replan_slice as the tool to use", () => {
|
||||
const prompt = readPrompt("replan-slice");
|
||||
assert.match(prompt, /gsd_replan_slice/);
|
||||
});
|
||||
|
||||
// ─── Prompt migration: reassess-roadmap → gsd_reassess_roadmap ───────
|
||||
|
||||
test("reassess-roadmap prompt names gsd_reassess_roadmap as the tool to use", () => {
|
||||
const prompt = readPrompt("reassess-roadmap");
|
||||
assert.match(prompt, /gsd_reassess_roadmap/);
|
||||
});
|
||||
|
||||
test("reactive-execute prompt references tool calls instead of checkbox updates", () => {
|
||||
const prompt = readPrompt("reactive-execute");
|
||||
assert.doesNotMatch(prompt, /checkbox updates/);
|
||||
|
|
|
|||
325
src/resources/extensions/gsd/tests/reassess-handler.test.ts
Normal file
325
src/resources/extensions/gsd/tests/reassess-handler.test.ts
Normal file
|
|
@ -0,0 +1,325 @@
|
|||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, existsSync, readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
getSlice,
|
||||
getMilestoneSlices,
|
||||
getAssessment,
|
||||
_getAdapter,
|
||||
} from '../gsd-db.ts';
|
||||
import { handleReassessRoadmap } from '../tools/reassess-roadmap.ts';
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-reassess-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01'), { recursive: true });
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S02'), { recursive: true });
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S03'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
function seedMilestoneWithSlices(opts?: {
|
||||
s01Status?: string;
|
||||
s02Status?: string;
|
||||
s03Status?: string;
|
||||
}): void {
|
||||
insertMilestone({ id: 'M001', title: 'Test Milestone', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Slice One', status: opts?.s01Status ?? 'complete', demo: 'Demo one.' });
|
||||
insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Slice Two', status: opts?.s02Status ?? 'pending', demo: 'Demo two.' });
|
||||
insertSlice({ id: 'S03', milestoneId: 'M001', title: 'Slice Three', status: opts?.s03Status ?? 'pending', demo: 'Demo three.' });
|
||||
}
|
||||
|
||||
function validReassessParams() {
|
||||
return {
|
||||
milestoneId: 'M001',
|
||||
completedSliceId: 'S01',
|
||||
verdict: 'confirmed',
|
||||
assessment: 'S01 completed successfully. Roadmap is on track.',
|
||||
sliceChanges: {
|
||||
modified: [
|
||||
{
|
||||
sliceId: 'S02',
|
||||
title: 'Updated Slice Two',
|
||||
risk: 'high',
|
||||
depends: ['S01'],
|
||||
demo: 'Updated demo two.',
|
||||
},
|
||||
],
|
||||
added: [
|
||||
{
|
||||
sliceId: 'S04',
|
||||
title: 'New Slice Four',
|
||||
risk: 'low',
|
||||
depends: ['S02'],
|
||||
demo: 'Demo four.',
|
||||
},
|
||||
],
|
||||
removed: ['S03'],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────
|
||||
|
||||
test('handleReassessRoadmap rejects invalid payloads (missing milestoneId)', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices();
|
||||
const result = await handleReassessRoadmap({ ...validReassessParams(), milestoneId: '' }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /validation failed/);
|
||||
assert.match(result.error, /milestoneId/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap rejects missing milestone', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
// No milestone seeded
|
||||
const result = await handleReassessRoadmap(validReassessParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /not found/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap rejects structural violation: modifying a completed slice', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' });
|
||||
|
||||
const result = await handleReassessRoadmap({
|
||||
...validReassessParams(),
|
||||
sliceChanges: {
|
||||
modified: [{ sliceId: 'S01', title: 'Trying to modify completed S01' }],
|
||||
added: [],
|
||||
removed: [],
|
||||
},
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed slice/);
|
||||
assert.match(result.error, /S01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap rejects structural violation: removing a completed slice', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' });
|
||||
|
||||
const result = await handleReassessRoadmap({
|
||||
...validReassessParams(),
|
||||
sliceChanges: {
|
||||
modified: [],
|
||||
added: [],
|
||||
removed: ['S01'],
|
||||
},
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed slice/);
|
||||
assert.match(result.error, /S01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
// Happy path: a reassess touching only pending slices must (1) persist the
// assessment row, (2) apply modify/add/remove slice mutations in the DB,
// (3) leave the completed slice untouched, and (4) re-render ROADMAP.md and
// ASSESSMENT.md on disk (DB is source of truth; markdown is derived).
test('handleReassessRoadmap succeeds when modifying only pending slices', async () => {
  const base = makeTmpBase();
  openDatabase(join(base, '.gsd', 'gsd.db'));

  try {
    seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' });

    const params = validReassessParams();
    const result = await handleReassessRoadmap(params, base);
    assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);

    // Verify assessments row exists in DB
    // NOTE(review): the assessment row appears to be keyed by relative path — confirm against getAssessment.
    const assessmentPath = join('.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md');
    const assessment = getAssessment(assessmentPath);
    assert.ok(assessment, 'assessment row should exist in DB');
    assert.equal(assessment['milestone_id'], 'M001');
    assert.equal(assessment['status'], 'confirmed');
    assert.equal(assessment['scope'], 'roadmap');
    assert.ok((assessment['full_content'] as string).includes('S01 completed successfully'), 'assessment content should be stored');

    // Verify S02 was updated
    const s02 = getSlice('M001', 'S02');
    assert.ok(s02, 'S02 should still exist');
    assert.equal(s02?.title, 'Updated Slice Two');
    assert.equal(s02?.risk, 'high');
    assert.equal(s02?.demo, 'Updated demo two.');

    // Verify S03 was deleted
    const s03 = getSlice('M001', 'S03');
    assert.equal(s03, null, 'S03 should have been deleted');

    // Verify S04 was inserted
    const s04 = getSlice('M001', 'S04');
    assert.ok(s04, 'S04 should exist as a new slice');
    assert.equal(s04?.title, 'New Slice Four');
    assert.equal(s04?.status, 'pending');

    // Verify S01 (completed) was NOT touched
    const s01 = getSlice('M001', 'S01');
    assert.ok(s01, 'S01 should still exist');
    assert.equal(s01?.status, 'complete');

    // Verify ROADMAP.md re-rendered on disk
    const roadmapPath = join(base, '.gsd', 'milestones', 'M001', 'M001-ROADMAP.md');
    assert.ok(existsSync(roadmapPath), 'ROADMAP.md should be rendered to disk');
    const roadmapContent = readFileSync(roadmapPath, 'utf-8');
    assert.ok(roadmapContent.includes('Updated Slice Two'), 'ROADMAP.md should contain updated S02 title');

    // Verify ASSESSMENT.md exists on disk
    const assessmentDiskPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-ASSESSMENT.md');
    assert.ok(existsSync(assessmentDiskPath), 'ASSESSMENT.md should be rendered to disk');
    const assessmentContent = readFileSync(assessmentDiskPath, 'utf-8');
    assert.ok(assessmentContent.includes('confirmed'), 'ASSESSMENT.md should contain verdict');
    assert.ok(assessmentContent.includes('S01'), 'ASSESSMENT.md should reference completed slice');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
test('handleReassessRoadmap cache invalidation: getMilestoneSlices reflects mutations', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' });
|
||||
|
||||
const params = validReassessParams();
|
||||
const result = await handleReassessRoadmap(params, base);
|
||||
assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);
|
||||
|
||||
// After cache invalidation, DB queries should reflect mutations
|
||||
const slices = getMilestoneSlices('M001');
|
||||
const sliceIds = slices.map(s => s.id);
|
||||
|
||||
// S01 should remain (completed, untouched)
|
||||
assert.ok(sliceIds.includes('S01'), 'S01 should still exist after reassess');
|
||||
|
||||
// S02 should remain (modified, not removed)
|
||||
assert.ok(sliceIds.includes('S02'), 'S02 should still exist after reassess');
|
||||
|
||||
// S03 should be gone (removed)
|
||||
assert.ok(!sliceIds.includes('S03'), 'S03 should be gone after removal');
|
||||
|
||||
// S04 should exist (added)
|
||||
assert.ok(sliceIds.includes('S04'), 'S04 should exist after addition');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap is idempotent: calling twice with same params succeeds', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'pending', s03Status: 'pending' });
|
||||
|
||||
// First call with full mutations
|
||||
const params = validReassessParams();
|
||||
const first = await handleReassessRoadmap(params, base);
|
||||
assert.ok(!('error' in first), `first call error: ${'error' in first ? first.error : ''}`);
|
||||
|
||||
// Second call — S03 already deleted, S04 already exists (INSERT OR IGNORE), S02 already updated
|
||||
// This should still succeed because:
|
||||
// - assessments uses INSERT OR REPLACE (path PK)
|
||||
// - S04 insert uses INSERT OR IGNORE
|
||||
// - S02 update is idempotent
|
||||
// - S03 delete on nonexistent is a no-op
|
||||
const second = await handleReassessRoadmap(params, base);
|
||||
assert.ok(!('error' in second), `second call error: ${'error' in second ? second.error : ''}`);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap rejects slice with status "done" (alias for complete)', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'done', s02Status: 'pending', s03Status: 'pending' });
|
||||
|
||||
const result = await handleReassessRoadmap({
|
||||
...validReassessParams(),
|
||||
sliceChanges: {
|
||||
modified: [{ sliceId: 'S01', title: 'Trying to modify done S01' }],
|
||||
added: [],
|
||||
removed: [],
|
||||
},
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed slice/);
|
||||
assert.match(result.error, /S01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReassessRoadmap returns structured error payloads with actionable messages', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedMilestoneWithSlices({ s01Status: 'complete', s02Status: 'complete', s03Status: 'pending' });
|
||||
|
||||
// Try to modify S01 (completed)
|
||||
const modifyResult = await handleReassessRoadmap({
|
||||
...validReassessParams(),
|
||||
sliceChanges: {
|
||||
modified: [{ sliceId: 'S01', title: 'x' }],
|
||||
added: [],
|
||||
removed: [],
|
||||
},
|
||||
}, base);
|
||||
assert.ok('error' in modifyResult);
|
||||
assert.ok(typeof modifyResult.error === 'string', 'error should be a string');
|
||||
assert.ok(modifyResult.error.includes('S01'), 'error should name the specific slice ID S01');
|
||||
|
||||
// Try to remove S02 (completed)
|
||||
const removeResult = await handleReassessRoadmap({
|
||||
...validReassessParams(),
|
||||
sliceChanges: {
|
||||
modified: [],
|
||||
added: [],
|
||||
removed: ['S02'],
|
||||
},
|
||||
}, base);
|
||||
assert.ok('error' in removeResult);
|
||||
assert.ok(removeResult.error.includes('S02'), 'error should name the specific slice ID S02');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
410
src/resources/extensions/gsd/tests/replan-handler.test.ts
Normal file
410
src/resources/extensions/gsd/tests/replan-handler.test.ts
Normal file
|
|
@ -0,0 +1,410 @@
|
|||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, readFileSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
upsertTaskPlanning,
|
||||
getSliceTasks,
|
||||
getTask,
|
||||
getReplanHistory,
|
||||
_getAdapter,
|
||||
} from '../gsd-db.ts';
|
||||
import { handleReplanSlice } from '../tools/replan-slice.ts';
|
||||
import { parsePlan } from '../parsers-legacy.ts';
|
||||
|
||||
function makeTmpBase(): string {
|
||||
const base = mkdtempSync(join(tmpdir(), 'gsd-replan-'));
|
||||
mkdirSync(join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'tasks'), { recursive: true });
|
||||
return base;
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
function seedSliceWithTasks(opts?: {
|
||||
t01Status?: string;
|
||||
t02Status?: string;
|
||||
t03Status?: string;
|
||||
}): void {
|
||||
insertMilestone({ id: 'M001', title: 'Test Milestone', status: 'active' });
|
||||
insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Test Slice', status: 'active', demo: 'Demo.' });
|
||||
|
||||
insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Task One', status: opts?.t01Status ?? 'complete' });
|
||||
upsertTaskPlanning('M001', 'S01', 'T01', {
|
||||
description: 'First task description.',
|
||||
estimate: '30m',
|
||||
files: ['src/a.ts'],
|
||||
verify: 'node --test a.test.ts',
|
||||
inputs: ['src/a.ts'],
|
||||
expectedOutput: ['src/a.ts'],
|
||||
});
|
||||
|
||||
insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Task Two', status: opts?.t02Status ?? 'pending' });
|
||||
upsertTaskPlanning('M001', 'S01', 'T02', {
|
||||
description: 'Second task description.',
|
||||
estimate: '45m',
|
||||
files: ['src/b.ts'],
|
||||
verify: 'node --test b.test.ts',
|
||||
inputs: ['src/b.ts'],
|
||||
expectedOutput: ['src/b.ts'],
|
||||
});
|
||||
|
||||
if (opts?.t03Status !== undefined || !opts) {
|
||||
insertTask({ id: 'T03', sliceId: 'S01', milestoneId: 'M001', title: 'Task Three', status: opts?.t03Status ?? 'pending' });
|
||||
upsertTaskPlanning('M001', 'S01', 'T03', {
|
||||
description: 'Third task description.',
|
||||
estimate: '20m',
|
||||
files: ['src/c.ts'],
|
||||
verify: 'node --test c.test.ts',
|
||||
inputs: ['src/c.ts'],
|
||||
expectedOutput: ['src/c.ts'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function validReplanParams() {
|
||||
return {
|
||||
milestoneId: 'M001',
|
||||
sliceId: 'S01',
|
||||
blockerTaskId: 'T01',
|
||||
blockerDescription: 'T01 discovered a blocker in the API.',
|
||||
whatChanged: 'Updated T02 to use new API, removed T03, added T04.',
|
||||
updatedTasks: [
|
||||
{
|
||||
taskId: 'T02',
|
||||
title: 'Updated Task Two',
|
||||
description: 'Revised description for T02.',
|
||||
estimate: '1h',
|
||||
files: ['src/b-v2.ts'],
|
||||
verify: 'node --test b-v2.test.ts',
|
||||
inputs: ['src/b.ts'],
|
||||
expectedOutput: ['src/b-v2.ts'],
|
||||
},
|
||||
],
|
||||
removedTaskIds: ['T03'],
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Tests ────────────────────────────────────────────────────────────────
|
||||
|
||||
test('handleReplanSlice rejects invalid payloads (missing milestoneId)', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks();
|
||||
const result = await handleReplanSlice({ ...validReplanParams(), milestoneId: '' }, base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /validation failed/);
|
||||
assert.match(result.error, /milestoneId/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReplanSlice rejects structural violation: updating a completed task', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending' });
|
||||
|
||||
const result = await handleReplanSlice({
|
||||
...validReplanParams(),
|
||||
updatedTasks: [
|
||||
{
|
||||
taskId: 'T01',
|
||||
title: 'Trying to update completed T01',
|
||||
description: 'Should be rejected.',
|
||||
estimate: '1h',
|
||||
files: [],
|
||||
verify: '',
|
||||
inputs: [],
|
||||
expectedOutput: [],
|
||||
},
|
||||
],
|
||||
removedTaskIds: [],
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed task/);
|
||||
assert.match(result.error, /T01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReplanSlice rejects structural violation: removing a completed task', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending' });
|
||||
|
||||
const result = await handleReplanSlice({
|
||||
...validReplanParams(),
|
||||
updatedTasks: [],
|
||||
removedTaskIds: ['T01'],
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed task/);
|
||||
assert.match(result.error, /T01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
// Happy path: a replan touching only incomplete tasks must (1) append a
// replan_history row tied to the blocker task, (2) apply update/add/remove
// task mutations in the DB, (3) leave the completed task untouched, and
// (4) re-render PLAN.md and REPLAN.md on disk (DB is source of truth).
test('handleReplanSlice succeeds when modifying only incomplete tasks', async () => {
  const base = makeTmpBase();
  openDatabase(join(base, '.gsd', 'gsd.db'));

  try {
    seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' });

    const params = {
      ...validReplanParams(),
      updatedTasks: [
        {
          taskId: 'T02',
          title: 'Updated Task Two',
          description: 'Revised description for T02.',
          estimate: '1h',
          files: ['src/b-v2.ts'],
          verify: 'node --test b-v2.test.ts',
          inputs: ['src/b.ts'],
          expectedOutput: ['src/b-v2.ts'],
        },
        {
          // T04 does not exist yet — an entry for an unknown taskId acts as an add.
          taskId: 'T04',
          title: 'New Task Four',
          description: 'Brand new task added during replan.',
          estimate: '30m',
          files: ['src/d.ts'],
          verify: 'node --test d.test.ts',
          inputs: [],
          expectedOutput: ['src/d.ts'],
        },
      ],
      removedTaskIds: ['T03'],
    };

    const result = await handleReplanSlice(params, base);
    assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);

    // Verify replan_history row exists
    const history = getReplanHistory('M001', 'S01');
    assert.ok(history.length > 0, 'replan_history should have at least one entry');
    assert.equal(history[0]['milestone_id'], 'M001');
    assert.equal(history[0]['slice_id'], 'S01');
    assert.equal(history[0]['task_id'], 'T01');

    // Verify T02 was updated
    const t02 = getTask('M001', 'S01', 'T02');
    assert.ok(t02, 'T02 should still exist');
    assert.equal(t02?.title, 'Updated Task Two');
    assert.equal(t02?.description, 'Revised description for T02.');

    // Verify T03 was deleted
    const t03 = getTask('M001', 'S01', 'T03');
    assert.equal(t03, null, 'T03 should have been deleted');

    // Verify T04 was inserted
    const t04 = getTask('M001', 'S01', 'T04');
    assert.ok(t04, 'T04 should exist as a new task');
    assert.equal(t04?.title, 'New Task Four');
    assert.equal(t04?.status, 'pending');

    // Verify T01 (completed) was NOT touched
    const t01 = getTask('M001', 'S01', 'T01');
    assert.ok(t01, 'T01 should still exist');
    assert.equal(t01?.status, 'complete');

    // Verify rendered PLAN.md exists on disk
    const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md');
    assert.ok(existsSync(planPath), 'PLAN.md should be rendered to disk');

    // Verify REPLAN.md exists on disk
    const replanPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-REPLAN.md');
    assert.ok(existsSync(replanPath), 'REPLAN.md should be rendered to disk');
    const replanContent = readFileSync(replanPath, 'utf-8');
    assert.ok(replanContent.includes('Blocker Description'), 'REPLAN.md should contain blocker section');
    assert.ok(replanContent.includes('T01'), 'REPLAN.md should reference blocker task');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
// After a replan, re-reading and re-parsing the rendered PLAN.md from disk
// must reflect the mutations — proving the renderer regenerated the file
// rather than serving a stale cached version.
test('handleReplanSlice cache invalidation: re-parsing PLAN.md reflects mutations', async () => {
  const base = makeTmpBase();
  openDatabase(join(base, '.gsd', 'gsd.db'));

  try {
    seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' });

    const params = {
      ...validReplanParams(),
      updatedTasks: [
        {
          taskId: 'T02',
          title: 'Cache-Test Updated T02',
          description: 'This title should appear in re-parsed plan.',
          estimate: '1h',
          files: ['src/b.ts'],
          verify: 'test',
          inputs: [],
          expectedOutput: [],
        },
      ],
      removedTaskIds: ['T03'],
    };

    const result = await handleReplanSlice(params, base);
    assert.ok(!('error' in result), `unexpected error: ${'error' in result ? result.error : ''}`);

    // Re-parse PLAN.md from disk to verify cache invalidation worked
    const planPath = join(base, '.gsd', 'milestones', 'M001', 'slices', 'S01', 'S01-PLAN.md');
    const content = readFileSync(planPath, 'utf-8');
    const parsed = parsePlan(content);

    // T01 should still be present (completed, untouched)
    const t01Task = parsed.tasks.find(t => t.id === 'T01');
    assert.ok(t01Task, 'completed T01 should remain in parsed plan');

    // T02 should show updated title
    const t02Task = parsed.tasks.find(t => t.id === 'T02');
    assert.ok(t02Task, 'T02 should be in parsed plan');
    assert.ok(t02Task?.title?.includes('Cache-Test Updated T02'), 'T02 title should be updated');

    // T03 should be gone
    const t03Task = parsed.tasks.find(t => t.id === 'T03');
    assert.equal(t03Task, undefined, 'T03 should not appear in parsed plan after removal');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
test('handleReplanSlice is idempotent: calling twice with same params succeeds', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks({ t01Status: 'complete', t02Status: 'pending', t03Status: 'pending' });
|
||||
|
||||
const params = {
|
||||
...validReplanParams(),
|
||||
updatedTasks: [
|
||||
{
|
||||
taskId: 'T02',
|
||||
title: 'Idempotent Update',
|
||||
description: 'Same update applied twice.',
|
||||
estimate: '1h',
|
||||
files: ['src/b.ts'],
|
||||
verify: 'test',
|
||||
inputs: [],
|
||||
expectedOutput: [],
|
||||
},
|
||||
],
|
||||
removedTaskIds: ['T03'],
|
||||
};
|
||||
|
||||
const first = await handleReplanSlice(params, base);
|
||||
assert.ok(!('error' in first), `first call error: ${'error' in first ? first.error : ''}`);
|
||||
|
||||
const second = await handleReplanSlice(params, base);
|
||||
assert.ok(!('error' in second), `second call error: ${'error' in second ? second.error : ''}`);
|
||||
|
||||
// Both should succeed and replan_history should have 2 entries
|
||||
const history = getReplanHistory('M001', 'S01');
|
||||
assert.ok(history.length >= 2, 'replan_history should have at least 2 entries after idempotent rerun');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReplanSlice returns missing parent slice error', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
insertMilestone({ id: 'M001', title: 'Milestone', status: 'active' });
|
||||
// No slice inserted
|
||||
|
||||
const result = await handleReplanSlice(validReplanParams(), base);
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /missing parent slice/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReplanSlice rejects task with status "done" (alias for complete)', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks({ t01Status: 'done', t02Status: 'pending' });
|
||||
|
||||
const result = await handleReplanSlice({
|
||||
...validReplanParams(),
|
||||
updatedTasks: [
|
||||
{
|
||||
taskId: 'T01',
|
||||
title: 'Trying to update done T01',
|
||||
description: 'Should be rejected.',
|
||||
estimate: '1h',
|
||||
files: [],
|
||||
verify: '',
|
||||
inputs: [],
|
||||
expectedOutput: [],
|
||||
},
|
||||
],
|
||||
removedTaskIds: [],
|
||||
}, base);
|
||||
|
||||
assert.ok('error' in result);
|
||||
assert.match(result.error, /completed task/);
|
||||
assert.match(result.error, /T01/);
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
||||
test('handleReplanSlice returns structured error payloads with actionable messages', async () => {
|
||||
const base = makeTmpBase();
|
||||
openDatabase(join(base, '.gsd', 'gsd.db'));
|
||||
|
||||
try {
|
||||
seedSliceWithTasks({ t01Status: 'complete', t02Status: 'complete', t03Status: 'pending' });
|
||||
|
||||
// Try to modify T01 (completed)
|
||||
const modifyResult = await handleReplanSlice({
|
||||
...validReplanParams(),
|
||||
updatedTasks: [{ taskId: 'T01', title: 'x', description: '', estimate: '', files: [], verify: '', inputs: [], expectedOutput: [] }],
|
||||
removedTaskIds: [],
|
||||
}, base);
|
||||
assert.ok('error' in modifyResult);
|
||||
assert.ok(typeof modifyResult.error === 'string', 'error should be a string');
|
||||
assert.ok(modifyResult.error.includes('T01'), 'error should name the specific task ID');
|
||||
|
||||
// Try to remove T02 (completed)
|
||||
const removeResult = await handleReplanSlice({
|
||||
...validReplanParams(),
|
||||
updatedTasks: [],
|
||||
removedTaskIds: ['T02'],
|
||||
}, base);
|
||||
assert.ok('error' in removeResult);
|
||||
assert.ok(removeResult.error.includes('T02'), 'error should name the specific task ID T02');
|
||||
} finally {
|
||||
cleanup(base);
|
||||
}
|
||||
});
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { parseRoadmap } from "../files.ts";
|
||||
import { parseRoadmap } from "../parsers-legacy.ts";
|
||||
import { parseRoadmapSlices, expandDependencies } from "../roadmap-slices.ts";
|
||||
|
||||
const content = `# M003: Current
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import { join } from "node:path";
|
|||
import { tmpdir } from "node:os";
|
||||
|
||||
import { detectRogueFileWrites } from "../auto-post-unit.ts";
|
||||
import { openDatabase, closeDatabase, isDbAvailable, insertMilestone, insertSlice, insertTask, updateSliceStatus } from "../gsd-db.ts";
|
||||
import { openDatabase, closeDatabase, isDbAvailable, insertMilestone, insertSlice, insertTask, updateSliceStatus, upsertMilestonePlanning } from "../gsd-db.ts";
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -41,6 +41,23 @@ function createSliceSummaryOnDisk(basePath: string, mid: string, sid: string): s
|
|||
return summaryFile;
|
||||
}
|
||||
|
||||
function createRoadmapOnDisk(basePath: string, mid: string): string {
|
||||
const milestoneDir = join(basePath, ".gsd", "milestones", mid);
|
||||
mkdirSync(milestoneDir, { recursive: true });
|
||||
const roadmapFile = join(milestoneDir, `${mid}-ROADMAP.md`);
|
||||
writeFileSync(roadmapFile, `# ${mid}: Test Roadmap\n`, "utf-8");
|
||||
return roadmapFile;
|
||||
}
|
||||
|
||||
function createSlicePlanOnDisk(basePath: string, mid: string, sid: string): string {
|
||||
const sliceDir = join(basePath, ".gsd", "milestones", mid, "slices", sid);
|
||||
mkdirSync(sliceDir, { recursive: true });
|
||||
const planFile = join(sliceDir, `${sid}-PLAN.md`);
|
||||
writeFileSync(planFile, `# ${sid}: Test Plan\n`, "utf-8");
|
||||
return planFile;
|
||||
}
|
||||
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("rogue detection: task summary on disk, no DB row → detected as rogue", () => {
|
||||
|
|
@ -183,3 +200,97 @@ test("rogue detection: slice summary on disk, DB row with status 'complete' →
|
|||
rmSync(basePath, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("rogue detection: plan milestone roadmap on disk, no milestone planning row → detected as rogue", () => {
|
||||
const basePath = createTmpBase();
|
||||
const dbPath = join(basePath, ".gsd", "gsd.db");
|
||||
mkdirSync(join(basePath, ".gsd"), { recursive: true });
|
||||
|
||||
try {
|
||||
openDatabase(dbPath);
|
||||
|
||||
const roadmapPath = createRoadmapOnDisk(basePath, "M001");
|
||||
assert.ok(existsSync(roadmapPath), "Roadmap file should exist on disk");
|
||||
|
||||
const rogues = detectRogueFileWrites("plan-milestone", "M001", basePath);
|
||||
assert.equal(rogues.length, 1, "Should detect one rogue roadmap file");
|
||||
assert.equal(rogues[0].path, roadmapPath);
|
||||
assert.equal(rogues[0].unitType, "plan-milestone");
|
||||
assert.equal(rogues[0].unitId, "M001");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
rmSync(basePath, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("rogue detection: plan milestone roadmap on disk, DB milestone planning row exists → NOT rogue", () => {
|
||||
const basePath = createTmpBase();
|
||||
const dbPath = join(basePath, ".gsd", "gsd.db");
|
||||
mkdirSync(join(basePath, ".gsd"), { recursive: true });
|
||||
|
||||
try {
|
||||
openDatabase(dbPath);
|
||||
|
||||
createRoadmapOnDisk(basePath, "M001");
|
||||
insertMilestone({ id: "M001", title: "Planned Milestone" });
|
||||
upsertMilestonePlanning("M001", {
|
||||
vision: "Real planning state",
|
||||
requirementCoverage: "R001 → S01",
|
||||
boundaryMapMarkdown: "- planner → db",
|
||||
});
|
||||
|
||||
const rogues = detectRogueFileWrites("plan-milestone", "M001", basePath);
|
||||
assert.equal(rogues.length, 0, "Should NOT detect rogue when milestone planning state exists");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
rmSync(basePath, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("rogue detection: slice plan on disk, no slice planning row → detected as rogue", () => {
|
||||
const basePath = createTmpBase();
|
||||
const dbPath = join(basePath, ".gsd", "gsd.db");
|
||||
mkdirSync(join(basePath, ".gsd"), { recursive: true });
|
||||
|
||||
try {
|
||||
openDatabase(dbPath);
|
||||
|
||||
const planPath = createSlicePlanOnDisk(basePath, "M001", "S01");
|
||||
assert.ok(existsSync(planPath), "Slice plan file should exist on disk");
|
||||
|
||||
const rogues = detectRogueFileWrites("plan-slice", "M001/S01", basePath);
|
||||
assert.equal(rogues.length, 1, "Should detect one rogue slice plan file");
|
||||
assert.equal(rogues[0].path, planPath);
|
||||
assert.equal(rogues[0].unitType, "plan-slice");
|
||||
assert.equal(rogues[0].unitId, "M001/S01");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
rmSync(basePath, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("rogue detection: slice plan on disk, DB slice planning row exists → NOT rogue", () => {
|
||||
const basePath = createTmpBase();
|
||||
const dbPath = join(basePath, ".gsd", "gsd.db");
|
||||
mkdirSync(join(basePath, ".gsd"), { recursive: true });
|
||||
|
||||
try {
|
||||
openDatabase(dbPath);
|
||||
|
||||
createSlicePlanOnDisk(basePath, "M001", "S01");
|
||||
insertMilestone({ id: "M001" });
|
||||
insertSlice({
|
||||
milestoneId: "M001",
|
||||
id: "S01",
|
||||
title: "Planned Slice",
|
||||
status: "pending",
|
||||
demo: "Observable plan",
|
||||
});
|
||||
|
||||
const rogues = detectRogueFileWrites("plan-slice", "M001/S01", basePath);
|
||||
assert.equal(rogues.length, 0, "Should NOT detect rogue when slice planning state exists");
|
||||
} finally {
|
||||
closeDatabase();
|
||||
rmSync(basePath, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
|
|
|||
176
src/resources/extensions/gsd/tests/schema-v9-sequence.test.ts
Normal file
176
src/resources/extensions/gsd/tests/schema-v9-sequence.test.ts
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
|
||||
import {
|
||||
openDatabase,
|
||||
closeDatabase,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
insertTask,
|
||||
getMilestoneSlices,
|
||||
getSliceTasks,
|
||||
getActiveSliceFromDb,
|
||||
getActiveTaskFromDb,
|
||||
} from '../gsd-db.ts';
|
||||
|
||||
function makeTmp(): string {
|
||||
return mkdtempSync(join(tmpdir(), 'gsd-v9-'));
|
||||
}
|
||||
|
||||
function cleanup(base: string): void {
|
||||
try { closeDatabase(); } catch { /* noop */ }
|
||||
try { rmSync(base, { recursive: true, force: true }); } catch { /* noop */ }
|
||||
}
|
||||
|
||||
// Smoke test for the v9 migration: the `sequence` column must exist on both
// the slices and tasks tables and round-trip through insert/select.
test('schema v9: migration adds sequence column to slices and tasks', () => {
  const base = makeTmp();
  const dbPath = join(base, 'gsd.db');
  openDatabase(dbPath);
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });
    // If sequence column doesn't exist, these would throw
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Slice 1', sequence: 5 });
    insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Task 1', sequence: 3 });

    // Stored sequence values must come back unchanged.
    const slices = getMilestoneSlices('M001');
    assert.equal(slices.length, 1);
    assert.equal(slices[0]!.sequence, 5);

    const tasks = getSliceTasks('M001', 'S01');
    assert.equal(tasks.length, 1);
    assert.equal(tasks[0]!.sequence, 3);
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
// Ordering contract: `sequence` is the primary sort key; `id` breaks ties.
test('schema v9: getMilestoneSlices returns slices ordered by sequence then id', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });

    // Insert in reverse lexicographic order with sequence overriding id order
    insertSlice({ id: 'S03', milestoneId: 'M001', title: 'Third by id, first by seq', sequence: 1 });
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'First by id, third by seq', sequence: 3 });
    insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Second by id, second by seq', sequence: 2 });

    const slices = getMilestoneSlices('M001');
    assert.equal(slices.length, 3);
    assert.equal(slices[0]!.id, 'S03', 'sequence=1 should be first');
    assert.equal(slices[1]!.id, 'S02', 'sequence=2 should be second');
    assert.equal(slices[2]!.id, 'S01', 'sequence=3 should be third');
  } finally {
    cleanup(base);
  }
});

// Same ordering contract as above, applied to tasks within a slice.
test('schema v9: getSliceTasks returns tasks ordered by sequence then id', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Slice' });

    // Insert tasks with sequence overriding id order
    insertTask({ id: 'T03', sliceId: 'S01', milestoneId: 'M001', title: 'Third by id', sequence: 1 });
    insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'First by id', sequence: 3 });
    insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Second by id', sequence: 2 });

    const tasks = getSliceTasks('M001', 'S01');
    assert.equal(tasks.length, 3);
    assert.equal(tasks[0]!.id, 'T03', 'sequence=1 should be first');
    assert.equal(tasks[1]!.id, 'T02', 'sequence=2 should be second');
    assert.equal(tasks[2]!.id, 'T01', 'sequence=3 should be third');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
// When every row keeps the default sequence (0), ties fall back to id order,
// preserving the pre-v9 lexicographic behavior.
test('schema v9: default sequence (0) falls back to id-based ordering', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });

    // All slices with default sequence=0 should sort by id
    insertSlice({ id: 'S03', milestoneId: 'M001', title: 'Third' });
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'First' });
    insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Second' });

    const slices = getMilestoneSlices('M001');
    assert.equal(slices[0]!.id, 'S01', 'default seq=0: should sort by id');
    assert.equal(slices[1]!.id, 'S02');
    assert.equal(slices[2]!.id, 'S03');

    // Same for tasks
    insertSlice({ id: 'S04', milestoneId: 'M001', title: 'Container' });
    insertTask({ id: 'T02', sliceId: 'S04', milestoneId: 'M001', title: 'B' });
    insertTask({ id: 'T01', sliceId: 'S04', milestoneId: 'M001', title: 'A' });
    insertTask({ id: 'T03', sliceId: 'S04', milestoneId: 'M001', title: 'C' });

    const tasks = getSliceTasks('M001', 'S04');
    assert.equal(tasks[0]!.id, 'T01');
    assert.equal(tasks[1]!.id, 'T02');
    assert.equal(tasks[2]!.id, 'T03');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
// Active-unit selection must honor sequence: among pending slices, the one
// with the lowest sequence becomes active regardless of id order.
test('schema v9: getActiveSliceFromDb respects sequence ordering', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });

    // S02 has lower sequence so should be active first despite higher id than S01
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Higher seq', status: 'pending', sequence: 5 });
    insertSlice({ id: 'S02', milestoneId: 'M001', title: 'Lower seq', status: 'pending', sequence: 2 });

    const active = getActiveSliceFromDb('M001');
    assert.ok(active);
    assert.equal(active!.id, 'S02', 'lower sequence should be active first');
  } finally {
    cleanup(base);
  }
});

// Same selection rule applied to tasks within a slice.
test('schema v9: getActiveTaskFromDb respects sequence ordering', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'Slice' });

    insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'Higher seq', status: 'pending', sequence: 10 });
    insertTask({ id: 'T02', sliceId: 'S01', milestoneId: 'M001', title: 'Lower seq', status: 'pending', sequence: 1 });

    const active = getActiveTaskFromDb('M001', 'S01');
    assert.ok(active);
    assert.equal(active!.id, 'T02', 'lower sequence should be active first');
  } finally {
    cleanup(base);
  }
});
|
||||
|
||||
// Omitting `sequence` entirely must persist 0 on both slices and tasks.
test('schema v9: sequence field defaults to 0 when not provided', () => {
  const base = makeTmp();
  openDatabase(join(base, 'gsd.db'));
  try {
    insertMilestone({ id: 'M001', title: 'Test', status: 'active' });
    insertSlice({ id: 'S01', milestoneId: 'M001', title: 'No seq' });
    insertTask({ id: 'T01', sliceId: 'S01', milestoneId: 'M001', title: 'No seq' });

    const slices = getMilestoneSlices('M001');
    assert.equal(slices[0]!.sequence, 0, 'slice sequence defaults to 0');

    const tasks = getSliceTasks('M001', 'S01');
    assert.equal(tasks[0]!.sequence, 0, 'task sequence defaults to 0');
  } finally {
    cleanup(base);
  }
});
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
// tool-naming — Verifies canonical + alias tool registration for GSD DB tools.
|
||||
//
|
||||
// Each of the 6 DB tools must register under its canonical gsd_concept_action name
|
||||
// AND under the old gsd_action_concept name as a backward-compatible alias.
|
||||
// Each DB tool must register under its canonical gsd_concept_action name
|
||||
// AND under a backward-compatible alias name.
|
||||
// The alias must share the exact same execute function reference as the canonical tool.
|
||||
|
||||
import { createTestContext } from './test-helpers.ts';
|
||||
|
|
@ -28,6 +28,11 @@ const RENAME_MAP: Array<{ canonical: string; alias: string }> = [
|
|||
{ canonical: "gsd_milestone_generate_id", alias: "gsd_generate_milestone_id" },
|
||||
{ canonical: "gsd_task_complete", alias: "gsd_complete_task" },
|
||||
{ canonical: "gsd_slice_complete", alias: "gsd_complete_slice" },
|
||||
{ canonical: "gsd_plan_milestone", alias: "gsd_milestone_plan" },
|
||||
{ canonical: "gsd_plan_slice", alias: "gsd_slice_plan" },
|
||||
{ canonical: "gsd_plan_task", alias: "gsd_task_plan" },
|
||||
{ canonical: "gsd_replan_slice", alias: "gsd_slice_replan" },
|
||||
{ canonical: "gsd_reassess_roadmap", alias: "gsd_roadmap_reassess" },
|
||||
];
|
||||
|
||||
// ─── Registration count ──────────────────────────────────────────────────────
|
||||
|
|
@ -37,7 +42,7 @@ console.log('\n── Tool naming: registration count ──');
|
|||
const pi = makeMockPi();
|
||||
registerDbTools(pi);
|
||||
|
||||
assertEq(pi.tools.length, 12, 'Should register exactly 12 tools (6 canonical + 6 aliases)');
|
||||
assertEq(pi.tools.length, 22, 'Should register exactly 22 tools (11 canonical + 11 aliases)');
|
||||
|
||||
// ─── Both names exist for each pair ──────────────────────────────────────────
|
||||
|
||||
|
|
|
|||
|
|
@ -240,148 +240,6 @@ test("verification-evidence: formatEvidenceTable uses ✅/❌ emoji for pass/fai
|
|||
assert.ok(table.includes("❌ fail"), "failing check should have ❌ fail");
|
||||
});
|
||||
|
||||
// ─── Validator Rule Tests (T03) ──────────────────────────────────────────────
|
||||
|
||||
import { validateTaskSummaryContent } from "../observability-validator.ts";
|
||||
|
||||
const MINIMAL_SUMMARY_WITH_EVIDENCE = `---
|
||||
observability_surfaces:
|
||||
- gate-output
|
||||
---
|
||||
# T03 Summary
|
||||
|
||||
## Diagnostics
|
||||
Run \`npm test\` to verify.
|
||||
|
||||
## Verification Evidence
|
||||
| # | Command | Exit Code | Verdict | Duration |
|
||||
|---|---------|-----------|---------|----------|
|
||||
| 1 | npm run typecheck | 0 | ✅ pass | 2.3s |
|
||||
`;
|
||||
|
||||
const MINIMAL_SUMMARY_NO_EVIDENCE = `---
|
||||
observability_surfaces:
|
||||
- gate-output
|
||||
---
|
||||
# T03 Summary
|
||||
|
||||
## Diagnostics
|
||||
Run \`npm test\` to verify.
|
||||
`;
|
||||
|
||||
const MINIMAL_SUMMARY_PLACEHOLDER_EVIDENCE = `---
|
||||
observability_surfaces:
|
||||
- gate-output
|
||||
---
|
||||
# T03 Summary
|
||||
|
||||
## Diagnostics
|
||||
Run \`npm test\` to verify.
|
||||
|
||||
## Verification Evidence
|
||||
{{evidence_table}}
|
||||
`;
|
||||
|
||||
const MINIMAL_SUMMARY_NO_CHECKS_EVIDENCE = `---
|
||||
observability_surfaces:
|
||||
- gate-output
|
||||
---
|
||||
# T03 Summary
|
||||
|
||||
## Diagnostics
|
||||
Run \`npm test\` to verify.
|
||||
|
||||
## Verification Evidence
|
||||
_No verification checks discovered._
|
||||
`;
|
||||
|
||||
test("verification-evidence: validator accepts summary with real evidence table", () => {
|
||||
const issues = validateTaskSummaryContent("T03-SUMMARY.md", MINIMAL_SUMMARY_WITH_EVIDENCE);
|
||||
const evidenceIssues = issues.filter(
|
||||
(i) => i.ruleId === "evidence_block_missing" || i.ruleId === "evidence_block_placeholder",
|
||||
);
|
||||
assert.equal(evidenceIssues.length, 0, "no evidence warnings for real table");
|
||||
});
|
||||
|
||||
test("verification-evidence: validator warns when evidence section is missing", () => {
|
||||
const issues = validateTaskSummaryContent("T03-SUMMARY.md", MINIMAL_SUMMARY_NO_EVIDENCE);
|
||||
const match = issues.find((i) => i.ruleId === "evidence_block_missing");
|
||||
assert.ok(match, "should produce evidence_block_missing warning");
|
||||
assert.equal(match!.severity, "warning");
|
||||
assert.equal(match!.scope, "task-summary");
|
||||
});
|
||||
|
||||
test("verification-evidence: validator warns when evidence section has only placeholder text", () => {
|
||||
const issues = validateTaskSummaryContent("T03-SUMMARY.md", MINIMAL_SUMMARY_PLACEHOLDER_EVIDENCE);
|
||||
const match = issues.find((i) => i.ruleId === "evidence_block_placeholder");
|
||||
assert.ok(match, "should produce evidence_block_placeholder warning");
|
||||
assert.equal(match!.severity, "warning");
|
||||
});
|
||||
|
||||
test("verification-evidence: validator accepts 'no checks discovered' as valid content", () => {
|
||||
const issues = validateTaskSummaryContent("T03-SUMMARY.md", MINIMAL_SUMMARY_NO_CHECKS_EVIDENCE);
|
||||
const evidenceIssues = issues.filter(
|
||||
(i) => i.ruleId === "evidence_block_missing" || i.ruleId === "evidence_block_placeholder",
|
||||
);
|
||||
assert.equal(evidenceIssues.length, 0, "no evidence warnings for 'no checks discovered'");
|
||||
});
|
||||
|
||||
// ─── Integration Test: Full Chain (T03) ──────────────────────────────────────
|
||||
|
||||
test("verification-evidence: integration — VerificationResult → JSON → table → validator accepts", () => {
|
||||
const tmp = makeTempDir("ve-integration");
|
||||
try {
|
||||
// 1. Create a VerificationResult with 2 checks (1 pass, 1 fail)
|
||||
const result = makeResult({
|
||||
passed: false,
|
||||
checks: [
|
||||
{ command: "npm run typecheck", exitCode: 0, stdout: "ok", stderr: "", durationMs: 1500 },
|
||||
{ command: "npm run test:unit", exitCode: 1, stdout: "", stderr: "1 failed", durationMs: 3200 },
|
||||
],
|
||||
discoverySource: "package-json",
|
||||
});
|
||||
|
||||
// 2. Write JSON to temp dir and read it back
|
||||
writeVerificationJSON(result, tmp, "T03");
|
||||
const jsonPath = join(tmp, "T03-VERIFY.json");
|
||||
assert.ok(existsSync(jsonPath), "JSON file should exist");
|
||||
|
||||
const json = JSON.parse(readFileSync(jsonPath, "utf-8"));
|
||||
assert.equal(json.schemaVersion, 1, "schemaVersion should be 1");
|
||||
assert.equal(json.passed, false, "passed should be false");
|
||||
assert.equal(json.checks.length, 2, "should have 2 checks");
|
||||
assert.equal(json.checks[0].verdict, "pass", "first check should pass");
|
||||
assert.equal(json.checks[1].verdict, "fail", "second check should fail");
|
||||
|
||||
// 3. Generate evidence table and embed in a mock summary
|
||||
const table = formatEvidenceTable(result);
|
||||
assert.ok(table.includes("npm run typecheck"), "table should contain first command");
|
||||
assert.ok(table.includes("npm run test:unit"), "table should contain second command");
|
||||
|
||||
const fullSummary = `---
|
||||
observability_surfaces:
|
||||
- gate-output
|
||||
---
|
||||
# T03 Summary
|
||||
|
||||
## Diagnostics
|
||||
Run \`npm test\` to verify.
|
||||
|
||||
## Verification Evidence
|
||||
${table}
|
||||
`;
|
||||
|
||||
// 4. Validate — no evidence warnings
|
||||
const issues = validateTaskSummaryContent("T03-SUMMARY.md", fullSummary);
|
||||
const evidenceIssues = issues.filter(
|
||||
(i) => i.ruleId === "evidence_block_missing" || i.ruleId === "evidence_block_placeholder",
|
||||
);
|
||||
assert.equal(evidenceIssues.length, 0, "validator should accept real evidence from formatEvidenceTable");
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
// ─── Retry Evidence Field Tests (S03/T01) ─────────────────────────────────────
|
||||
|
||||
test("verification-evidence: writeVerificationJSON with retryAttempt and maxRetries includes them in output", () => {
|
||||
|
|
|
|||
|
|
@ -36,18 +36,24 @@ function createGitRepo(): string {
|
|||
* Returns true when the directory would PASS the health check (dispatch
|
||||
* proceeds), false when it would FAIL (dispatch blocked).
|
||||
*
|
||||
* This mirrors the fixed logic: .git must exist, AND at least one
|
||||
* PROJECT_FILES entry or a src/ directory must exist.
|
||||
 * The only hard gate is .git — project files are advisory (greenfield
 * projects won't have them yet). Returns a plain boolean; use
 * hasRecognizedProjectFiles separately to distinguish "pass with
 * project files" from "pass as greenfield".
|
||||
*/
|
||||
function wouldPassHealthCheck(basePath: string, existsSyncFn: (p: string) => boolean): boolean {
|
||||
const hasGit = existsSyncFn(join(basePath, ".git"));
|
||||
if (!hasGit) return false;
|
||||
|
||||
// .git is sufficient — greenfield projects proceed with a warning
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Whether the directory has recognized project files (used for greenfield detection). */
|
||||
function hasRecognizedProjectFiles(basePath: string, existsSyncFn: (p: string) => boolean): boolean {
|
||||
for (const file of PROJECT_FILES) {
|
||||
if (existsSyncFn(join(basePath, file))) return true;
|
||||
}
|
||||
if (existsSyncFn(join(basePath, "src"))) return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
@ -118,8 +124,9 @@ describe("health check with git repo", () => {
|
|||
assert.ok(wouldPassHealthCheck(dir, existsSync), "src/-only project should pass health check");
|
||||
});
|
||||
|
||||
test("health check fails for empty git repo with no project files", () => {
|
||||
assert.ok(!wouldPassHealthCheck(dir, existsSync), "empty git repo should fail health check");
|
||||
test("health check passes for empty git repo (greenfield project)", () => {
|
||||
assert.ok(wouldPassHealthCheck(dir, existsSync), "empty git repo should pass health check (greenfield)");
|
||||
assert.ok(!hasRecognizedProjectFiles(dir, existsSync), "empty git repo has no recognized project files");
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
244
src/resources/extensions/gsd/tools/plan-milestone.ts
Normal file
244
src/resources/extensions/gsd/tools/plan-milestone.ts
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
import { clearParseCache } from "../files.js";
|
||||
import {
|
||||
transaction,
|
||||
insertMilestone,
|
||||
insertSlice,
|
||||
upsertMilestonePlanning,
|
||||
upsertSlicePlanning,
|
||||
} from "../gsd-db.js";
|
||||
import { invalidateStateCache } from "../state.js";
|
||||
import { renderRoadmapFromDb } from "../markdown-renderer.js";
|
||||
|
||||
/** One slice entry supplied to gsd_plan_milestone; all fields are required free text. */
export interface PlanMilestoneSliceInput {
  sliceId: string;
  title: string;
  risk: string;
  // IDs of slices this one depends on (may be empty, per validateSlices).
  depends: string[];
  demo: string;
  goal: string;
  successCriteria: string;
  proofLevel: string;
  integrationClosure: string;
  observabilityImpact: string;
}

/**
 * Full input payload for the gsd_plan_milestone tool.
 * Only `status` and `dependsOn` are optional (validateParams defaults
 * `dependsOn` to [] and handlePlanMilestone defaults `status` to "active").
 */
export interface PlanMilestoneParams {
  milestoneId: string;
  title: string;
  status?: string;
  dependsOn?: string[];
  vision: string;
  successCriteria: string[];
  keyRisks: Array<{ risk: string; whyItMatters: string }>;
  proofStrategy: Array<{ riskOrUnknown: string; retireIn: string; whatWillBeProven: string }>;
  verificationContract: string;
  verificationIntegration: string;
  verificationOperational: string;
  verificationUat: string;
  definitionOfDone: string[];
  requirementCoverage: string;
  boundaryMapMarkdown: string;
  slices: PlanMilestoneSliceInput[];
}

/** Success result: the milestone written plus the rendered roadmap file path. */
export interface PlanMilestoneResult {
  milestoneId: string;
  roadmapPath: string;
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function validateStringArray(value: unknown, field: string): string[] {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error(`${field} must be an array`);
|
||||
}
|
||||
if (value.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`${field} must contain only non-empty strings`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function validateRiskEntries(value: unknown): Array<{ risk: string; whyItMatters: string }> {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error("keyRisks must be an array");
|
||||
}
|
||||
return value.map((entry, index) => {
|
||||
if (!entry || typeof entry !== "object") {
|
||||
throw new Error(`keyRisks[${index}] must be an object`);
|
||||
}
|
||||
const risk = (entry as Record<string, unknown>).risk;
|
||||
const whyItMatters = (entry as Record<string, unknown>).whyItMatters;
|
||||
if (!isNonEmptyString(risk) || !isNonEmptyString(whyItMatters)) {
|
||||
throw new Error(`keyRisks[${index}] must include non-empty risk and whyItMatters`);
|
||||
}
|
||||
return { risk, whyItMatters };
|
||||
});
|
||||
}
|
||||
|
||||
function validateProofStrategy(value: unknown): Array<{ riskOrUnknown: string; retireIn: string; whatWillBeProven: string }> {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error("proofStrategy must be an array");
|
||||
}
|
||||
return value.map((entry, index) => {
|
||||
if (!entry || typeof entry !== "object") {
|
||||
throw new Error(`proofStrategy[${index}] must be an object`);
|
||||
}
|
||||
const riskOrUnknown = (entry as Record<string, unknown>).riskOrUnknown;
|
||||
const retireIn = (entry as Record<string, unknown>).retireIn;
|
||||
const whatWillBeProven = (entry as Record<string, unknown>).whatWillBeProven;
|
||||
if (!isNonEmptyString(riskOrUnknown) || !isNonEmptyString(retireIn) || !isNonEmptyString(whatWillBeProven)) {
|
||||
throw new Error(`proofStrategy[${index}] must include non-empty riskOrUnknown, retireIn, and whatWillBeProven`);
|
||||
}
|
||||
return { riskOrUnknown, retireIn, whatWillBeProven };
|
||||
});
|
||||
}
|
||||
|
||||
function validateSlices(value: unknown): PlanMilestoneSliceInput[] {
|
||||
if (!Array.isArray(value) || value.length === 0) {
|
||||
throw new Error("slices must be a non-empty array");
|
||||
}
|
||||
|
||||
const seen = new Set<string>();
|
||||
return value.map((entry, index) => {
|
||||
if (!entry || typeof entry !== "object") {
|
||||
throw new Error(`slices[${index}] must be an object`);
|
||||
}
|
||||
const obj = entry as Record<string, unknown>;
|
||||
const sliceId = obj.sliceId;
|
||||
const title = obj.title;
|
||||
const risk = obj.risk;
|
||||
const depends = obj.depends;
|
||||
const demo = obj.demo;
|
||||
const goal = obj.goal;
|
||||
const successCriteria = obj.successCriteria;
|
||||
const proofLevel = obj.proofLevel;
|
||||
const integrationClosure = obj.integrationClosure;
|
||||
const observabilityImpact = obj.observabilityImpact;
|
||||
|
||||
if (!isNonEmptyString(sliceId)) throw new Error(`slices[${index}].sliceId must be a non-empty string`);
|
||||
if (seen.has(sliceId)) throw new Error(`slices[${index}].sliceId must be unique`);
|
||||
seen.add(sliceId);
|
||||
if (!isNonEmptyString(title)) throw new Error(`slices[${index}].title must be a non-empty string`);
|
||||
if (!isNonEmptyString(risk)) throw new Error(`slices[${index}].risk must be a non-empty string`);
|
||||
if (!Array.isArray(depends) || depends.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`slices[${index}].depends must be an array of non-empty strings`);
|
||||
}
|
||||
if (!isNonEmptyString(demo)) throw new Error(`slices[${index}].demo must be a non-empty string`);
|
||||
if (!isNonEmptyString(goal)) throw new Error(`slices[${index}].goal must be a non-empty string`);
|
||||
if (!isNonEmptyString(successCriteria)) throw new Error(`slices[${index}].successCriteria must be a non-empty string`);
|
||||
if (!isNonEmptyString(proofLevel)) throw new Error(`slices[${index}].proofLevel must be a non-empty string`);
|
||||
if (!isNonEmptyString(integrationClosure)) throw new Error(`slices[${index}].integrationClosure must be a non-empty string`);
|
||||
if (!isNonEmptyString(observabilityImpact)) throw new Error(`slices[${index}].observabilityImpact must be a non-empty string`);
|
||||
|
||||
return {
|
||||
sliceId,
|
||||
title,
|
||||
risk,
|
||||
depends,
|
||||
demo,
|
||||
goal,
|
||||
successCriteria,
|
||||
proofLevel,
|
||||
integrationClosure,
|
||||
observabilityImpact,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function validateParams(params: PlanMilestoneParams): PlanMilestoneParams {
|
||||
if (!isNonEmptyString(params?.milestoneId)) throw new Error("milestoneId is required");
|
||||
if (!isNonEmptyString(params?.title)) throw new Error("title is required");
|
||||
if (!isNonEmptyString(params?.vision)) throw new Error("vision is required");
|
||||
if (!isNonEmptyString(params?.verificationContract)) throw new Error("verificationContract is required");
|
||||
if (!isNonEmptyString(params?.verificationIntegration)) throw new Error("verificationIntegration is required");
|
||||
if (!isNonEmptyString(params?.verificationOperational)) throw new Error("verificationOperational is required");
|
||||
if (!isNonEmptyString(params?.verificationUat)) throw new Error("verificationUat is required");
|
||||
if (!isNonEmptyString(params?.requirementCoverage)) throw new Error("requirementCoverage is required");
|
||||
if (!isNonEmptyString(params?.boundaryMapMarkdown)) throw new Error("boundaryMapMarkdown is required");
|
||||
|
||||
return {
|
||||
...params,
|
||||
dependsOn: params.dependsOn ? validateStringArray(params.dependsOn, "dependsOn") : [],
|
||||
successCriteria: validateStringArray(params.successCriteria, "successCriteria"),
|
||||
keyRisks: validateRiskEntries(params.keyRisks),
|
||||
proofStrategy: validateProofStrategy(params.proofStrategy),
|
||||
definitionOfDone: validateStringArray(params.definitionOfDone, "definitionOfDone"),
|
||||
slices: validateSlices(params.slices),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Tool handler for gsd_plan_milestone: persist a milestone plan to the DB
 * and re-render the roadmap markdown from it (the DB is the source of truth).
 *
 * Never throws: returns { milestoneId, roadmapPath } on success, or
 * { error } with a stage-prefixed message ("validation failed" /
 * "db write failed" / "render failed").
 *
 * NOTE(review): if rendering fails, the transaction has already committed,
 * so the DB holds the new plan while the markdown on disk is stale —
 * confirm this partial-failure mode is intended.
 */
export async function handlePlanMilestone(
  rawParams: PlanMilestoneParams,
  basePath: string,
): Promise<PlanMilestoneResult | { error: string }> {
  // Stage 1: validate before any side effects.
  let params: PlanMilestoneParams;
  try {
    params = validateParams(rawParams);
  } catch (err) {
    return { error: `validation failed: ${(err as Error).message}` };
  }

  // Stage 2: write the milestone, its planning payload, and all slices
  // atomically — either everything lands or nothing does.
  try {
    transaction(() => {
      insertMilestone({
        id: params.milestoneId,
        title: params.title,
        status: params.status ?? "active",
        // NOTE(review): snake_case key, unlike the camelCase fields passed
        // to insertSlice below — confirm against insertMilestone's input type.
        depends_on: params.dependsOn ?? [],
      });

      upsertMilestonePlanning(params.milestoneId, {
        vision: params.vision,
        successCriteria: params.successCriteria,
        keyRisks: params.keyRisks,
        proofStrategy: params.proofStrategy,
        verificationContract: params.verificationContract,
        verificationIntegration: params.verificationIntegration,
        verificationOperational: params.verificationOperational,
        verificationUat: params.verificationUat,
        definitionOfDone: params.definitionOfDone,
        requirementCoverage: params.requirementCoverage,
        boundaryMapMarkdown: params.boundaryMapMarkdown,
      });

      // Each slice gets a row plus its planning payload; all start pending.
      for (const slice of params.slices) {
        insertSlice({
          id: slice.sliceId,
          milestoneId: params.milestoneId,
          title: slice.title,
          status: "pending",
          risk: slice.risk,
          depends: slice.depends,
          demo: slice.demo,
        });
        upsertSlicePlanning(params.milestoneId, slice.sliceId, {
          goal: slice.goal,
          successCriteria: slice.successCriteria,
          proofLevel: slice.proofLevel,
          integrationClosure: slice.integrationClosure,
          observabilityImpact: slice.observabilityImpact,
        });
      }
    });
  } catch (err) {
    return { error: `db write failed: ${(err as Error).message}` };
  }

  // Stage 3: regenerate the roadmap markdown from the DB we just wrote.
  let roadmapPath: string;
  try {
    const renderResult = await renderRoadmapFromDb(basePath, params.milestoneId);
    roadmapPath = renderResult.roadmapPath;
  } catch (err) {
    return { error: `render failed: ${(err as Error).message}` };
  }

  // Stage 4: drop cached state/parse results so readers see the new plan.
  invalidateStateCache();
  clearParseCache();

  return {
    milestoneId: params.milestoneId,
    roadmapPath,
  };
}
|
||||
189
src/resources/extensions/gsd/tools/plan-slice.ts
Normal file
189
src/resources/extensions/gsd/tools/plan-slice.ts
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
import { clearParseCache } from "../files.js";
|
||||
import {
|
||||
transaction,
|
||||
getSlice,
|
||||
insertTask,
|
||||
upsertSlicePlanning,
|
||||
upsertTaskPlanning,
|
||||
} from "../gsd-db.js";
|
||||
import { invalidateStateCache } from "../state.js";
|
||||
import { renderPlanFromDb } from "../markdown-renderer.js";
|
||||
|
||||
/**
 * One task entry supplied to gsd_plan_slice.
 * `observabilityImpact` is the only optional field; validateTasks coerces a
 * missing value to "".
 */
export interface PlanSliceTaskInput {
  taskId: string;
  title: string;
  description: string;
  estimate: string;
  files: string[];
  verify: string;
  inputs: string[];
  expectedOutput: string[];
  observabilityImpact?: string;
}

/** Full input payload for the gsd_plan_slice tool; all fields are required. */
export interface PlanSliceParams {
  milestoneId: string;
  sliceId: string;
  goal: string;
  successCriteria: string;
  proofLevel: string;
  integrationClosure: string;
  observabilityImpact: string;
  tasks: PlanSliceTaskInput[];
}

/** Success result: the slice written plus the rendered plan file paths. */
export interface PlanSliceResult {
  milestoneId: string;
  sliceId: string;
  planPath: string;
  taskPlanPaths: string[];
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function validateStringArray(value: unknown, field: string): string[] {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error(`${field} must be an array`);
|
||||
}
|
||||
if (value.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`${field} must contain only non-empty strings`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function validateTasks(value: unknown): PlanSliceTaskInput[] {
|
||||
if (!Array.isArray(value) || value.length === 0) {
|
||||
throw new Error("tasks must be a non-empty array");
|
||||
}
|
||||
|
||||
const seen = new Set<string>();
|
||||
return value.map((entry, index) => {
|
||||
if (!entry || typeof entry !== "object") {
|
||||
throw new Error(`tasks[${index}] must be an object`);
|
||||
}
|
||||
const obj = entry as Record<string, unknown>;
|
||||
const taskId = obj.taskId;
|
||||
const title = obj.title;
|
||||
const description = obj.description;
|
||||
const estimate = obj.estimate;
|
||||
const files = obj.files;
|
||||
const verify = obj.verify;
|
||||
const inputs = obj.inputs;
|
||||
const expectedOutput = obj.expectedOutput;
|
||||
const observabilityImpact = obj.observabilityImpact;
|
||||
|
||||
if (!isNonEmptyString(taskId)) throw new Error(`tasks[${index}].taskId must be a non-empty string`);
|
||||
if (seen.has(taskId)) throw new Error(`tasks[${index}].taskId must be unique`);
|
||||
seen.add(taskId);
|
||||
if (!isNonEmptyString(title)) throw new Error(`tasks[${index}].title must be a non-empty string`);
|
||||
if (!isNonEmptyString(description)) throw new Error(`tasks[${index}].description must be a non-empty string`);
|
||||
if (!isNonEmptyString(estimate)) throw new Error(`tasks[${index}].estimate must be a non-empty string`);
|
||||
if (!Array.isArray(files) || files.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`tasks[${index}].files must be an array of non-empty strings`);
|
||||
}
|
||||
if (!isNonEmptyString(verify)) throw new Error(`tasks[${index}].verify must be a non-empty string`);
|
||||
if (!Array.isArray(inputs) || inputs.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`tasks[${index}].inputs must be an array of non-empty strings`);
|
||||
}
|
||||
if (!Array.isArray(expectedOutput) || expectedOutput.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`tasks[${index}].expectedOutput must be an array of non-empty strings`);
|
||||
}
|
||||
if (observabilityImpact !== undefined && !isNonEmptyString(observabilityImpact)) {
|
||||
throw new Error(`tasks[${index}].observabilityImpact must be a non-empty string when provided`);
|
||||
}
|
||||
|
||||
return {
|
||||
taskId,
|
||||
title,
|
||||
description,
|
||||
estimate,
|
||||
files,
|
||||
verify,
|
||||
inputs,
|
||||
expectedOutput,
|
||||
observabilityImpact: typeof observabilityImpact === "string" ? observabilityImpact : "",
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function validateParams(params: PlanSliceParams): PlanSliceParams {
|
||||
if (!isNonEmptyString(params?.milestoneId)) throw new Error("milestoneId is required");
|
||||
if (!isNonEmptyString(params?.sliceId)) throw new Error("sliceId is required");
|
||||
if (!isNonEmptyString(params?.goal)) throw new Error("goal is required");
|
||||
if (!isNonEmptyString(params?.successCriteria)) throw new Error("successCriteria is required");
|
||||
if (!isNonEmptyString(params?.proofLevel)) throw new Error("proofLevel is required");
|
||||
if (!isNonEmptyString(params?.integrationClosure)) throw new Error("integrationClosure is required");
|
||||
if (!isNonEmptyString(params?.observabilityImpact)) throw new Error("observabilityImpact is required");
|
||||
|
||||
return {
|
||||
...params,
|
||||
tasks: validateTasks(params.tasks),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Handle the gsd_plan_slice tool: persist slice-level planning fields and the
 * full task breakdown to the DB, then regenerate the markdown plan files from
 * the DB (the DB is the source of truth for planning state).
 *
 * Never throws: every failure is returned as an `{ error }` object whose
 * message is prefixed with the failing stage (validation / db write / render).
 *
 * @param rawParams Unvalidated tool input.
 * @param basePath  Workspace root used by the markdown renderer.
 */
export async function handlePlanSlice(
  rawParams: PlanSliceParams,
  basePath: string,
): Promise<PlanSliceResult | { error: string }> {
  // Stage 1: validate the input shape before touching the DB.
  let params: PlanSliceParams;
  try {
    params = validateParams(rawParams);
  } catch (err) {
    return { error: `validation failed: ${(err as Error).message}` };
  }

  // Stage 2: the slice row must already exist — this tool only fills in
  // planning detail, it does not create slices.
  const parentSlice = getSlice(params.milestoneId, params.sliceId);
  if (!parentSlice) {
    return { error: `missing parent slice: ${params.milestoneId}/${params.sliceId}` };
  }

  // Stage 3: write the slice planning fields and all tasks atomically, so a
  // failure partway through leaves no half-planned slice behind.
  // NOTE(review): tasks are inserted without a getTask existence check
  // (handlePlanTask guards with one) — assumes insertTask copes with
  // re-planning a slice whose tasks already exist; confirm against gsd-db.js.
  try {
    transaction(() => {
      upsertSlicePlanning(params.milestoneId, params.sliceId, {
        goal: params.goal,
        successCriteria: params.successCriteria,
        proofLevel: params.proofLevel,
        integrationClosure: params.integrationClosure,
        observabilityImpact: params.observabilityImpact,
      });

      for (const task of params.tasks) {
        insertTask({
          id: task.taskId,
          sliceId: params.sliceId,
          milestoneId: params.milestoneId,
          title: task.title,
          status: "pending",
        });
        upsertTaskPlanning(params.milestoneId, params.sliceId, task.taskId, {
          title: task.title,
          description: task.description,
          estimate: task.estimate,
          files: task.files,
          verify: task.verify,
          inputs: task.inputs,
          expectedOutput: task.expectedOutput,
          // validateTasks already normalizes this to ""; the fallback only
          // satisfies the optional type on PlanSliceTaskInput.
          observabilityImpact: task.observabilityImpact ?? "",
        });
      }
    });
  } catch (err) {
    return { error: `db write failed: ${(err as Error).message}` };
  }

  // Stage 4: regenerate markdown from the DB, then drop caches that may
  // still hold the pre-plan state.
  try {
    const renderResult = await renderPlanFromDb(basePath, params.milestoneId, params.sliceId);
    invalidateStateCache();
    clearParseCache();
    return {
      milestoneId: params.milestoneId,
      sliceId: params.sliceId,
      planPath: renderResult.planPath,
      taskPlanPaths: renderResult.taskPlanPaths,
    };
  } catch (err) {
    return { error: `render failed: ${(err as Error).message}` };
  }
}
|
||||
116
src/resources/extensions/gsd/tools/plan-task.ts
Normal file
116
src/resources/extensions/gsd/tools/plan-task.ts
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
import { clearParseCache } from "../files.js";
|
||||
import { transaction, getSlice, getTask, insertTask, upsertTaskPlanning } from "../gsd-db.js";
|
||||
import { invalidateStateCache } from "../state.js";
|
||||
import { renderTaskPlanFromDb } from "../markdown-renderer.js";
|
||||
|
||||
/** Parameters for the gsd_plan_task tool: full planning detail for a single task. */
export interface PlanTaskParams {
  milestoneId: string;
  /** Parent slice; must already exist in the DB (see handlePlanTask). */
  sliceId: string;
  /** Task to plan; inserted with status "pending" if it does not exist yet. */
  taskId: string;
  title: string;
  description: string;
  /** Free-form effort estimate; only checked for non-emptiness. */
  estimate: string;
  /** Files the task is expected to touch. */
  files: string[];
  /** How completion of the task is verified. */
  verify: string;
  /** Inputs the task consumes. */
  inputs: string[];
  /** Artifacts/results the task should produce. */
  expectedOutput: string[];
  /** Optional; stored as "" when omitted. */
  observabilityImpact?: string;
}
|
||||
|
||||
/** Success payload of gsd_plan_task. */
export interface PlanTaskResult {
  milestoneId: string;
  sliceId: string;
  taskId: string;
  /** Path of the regenerated task plan markdown (from renderTaskPlanFromDb). */
  taskPlanPath: string;
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function validateStringArray(value: unknown, field: string): string[] {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error(`${field} must be an array`);
|
||||
}
|
||||
if (value.some((item) => !isNonEmptyString(item))) {
|
||||
throw new Error(`${field} must contain only non-empty strings`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function validateParams(params: PlanTaskParams): PlanTaskParams {
|
||||
if (!isNonEmptyString(params?.milestoneId)) throw new Error("milestoneId is required");
|
||||
if (!isNonEmptyString(params?.sliceId)) throw new Error("sliceId is required");
|
||||
if (!isNonEmptyString(params?.taskId)) throw new Error("taskId is required");
|
||||
if (!isNonEmptyString(params?.title)) throw new Error("title is required");
|
||||
if (!isNonEmptyString(params?.description)) throw new Error("description is required");
|
||||
if (!isNonEmptyString(params?.estimate)) throw new Error("estimate is required");
|
||||
if (!isNonEmptyString(params?.verify)) throw new Error("verify is required");
|
||||
if (params.observabilityImpact !== undefined && !isNonEmptyString(params.observabilityImpact)) {
|
||||
throw new Error("observabilityImpact must be a non-empty string when provided");
|
||||
}
|
||||
|
||||
return {
|
||||
...params,
|
||||
files: validateStringArray(params.files, "files"),
|
||||
inputs: validateStringArray(params.inputs, "inputs"),
|
||||
expectedOutput: validateStringArray(params.expectedOutput, "expectedOutput"),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Handle the gsd_plan_task tool: persist planning detail for a single task
 * (inserting the task row first if it does not exist), then regenerate the
 * task's plan markdown from the DB.
 *
 * Never throws: failures are returned as `{ error }` objects prefixed with
 * the failing stage (validation / db write / render).
 *
 * @param rawParams Unvalidated tool input.
 * @param basePath  Workspace root used by the markdown renderer.
 */
export async function handlePlanTask(
  rawParams: PlanTaskParams,
  basePath: string,
): Promise<PlanTaskResult | { error: string }> {
  // Stage 1: validate the input shape before touching the DB.
  let params: PlanTaskParams;
  try {
    params = validateParams(rawParams);
  } catch (err) {
    return { error: `validation failed: ${(err as Error).message}` };
  }

  // Stage 2: the parent slice must already exist.
  const parentSlice = getSlice(params.milestoneId, params.sliceId);
  if (!parentSlice) {
    return { error: `missing parent slice: ${params.milestoneId}/${params.sliceId}` };
  }

  // Stage 3: insert-if-missing plus planning upsert, atomically.
  try {
    transaction(() => {
      // Only create the row when the task is new — avoids clobbering the
      // status of an already-tracked task.
      if (!getTask(params.milestoneId, params.sliceId, params.taskId)) {
        insertTask({
          id: params.taskId,
          sliceId: params.sliceId,
          milestoneId: params.milestoneId,
          title: params.title,
          status: "pending",
        });
      }
      upsertTaskPlanning(params.milestoneId, params.sliceId, params.taskId, {
        title: params.title,
        description: params.description,
        estimate: params.estimate,
        files: params.files,
        verify: params.verify,
        inputs: params.inputs,
        expectedOutput: params.expectedOutput,
        // Optional in the params; stored as "" when omitted.
        observabilityImpact: params.observabilityImpact ?? "",
      });
    });
  } catch (err) {
    return { error: `db write failed: ${(err as Error).message}` };
  }

  // Stage 4: re-render the task plan markdown and drop stale caches.
  try {
    const renderResult = await renderTaskPlanFromDb(basePath, params.milestoneId, params.sliceId, params.taskId);
    invalidateStateCache();
    clearParseCache();
    return {
      milestoneId: params.milestoneId,
      sliceId: params.sliceId,
      taskId: params.taskId,
      taskPlanPath: renderResult.taskPlanPath,
    };
  } catch (err) {
    return { error: `render failed: ${(err as Error).message}` };
  }
}
|
||||
203
src/resources/extensions/gsd/tools/reassess-roadmap.ts
Normal file
203
src/resources/extensions/gsd/tools/reassess-roadmap.ts
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
import { clearParseCache } from "../files.js";
|
||||
import {
|
||||
transaction,
|
||||
getMilestone,
|
||||
getMilestoneSlices,
|
||||
insertSlice,
|
||||
updateSliceFields,
|
||||
insertAssessment,
|
||||
deleteSlice,
|
||||
} from "../gsd-db.js";
|
||||
import { invalidateStateCache } from "../state.js";
|
||||
import { renderRoadmapFromDb, renderAssessmentFromDb } from "../markdown-renderer.js";
|
||||
import { join } from "node:path";
|
||||
|
||||
/** A slice descriptor used in ReassessRoadmapParams.sliceChanges (modified or added entries). */
export interface SliceChangeInput {
  sliceId: string;
  title: string;
  /** Optional risk level; passed through to the DB as-is (may be undefined). */
  risk?: string;
  /** Optional slice IDs this slice depends on. */
  depends?: string[];
  /** Optional demo description; defaults to "" on insert (see handleReassessRoadmap). */
  demo?: string;
}
|
||||
|
||||
/** Parameters for gsd_reassess_roadmap, run after a slice completes. */
export interface ReassessRoadmapParams {
  milestoneId: string;
  /** Slice whose completion triggered the reassessment; the assessment artifact lives in its directory. */
  completedSliceId: string;
  /** Assessment verdict; stored as the assessment row's status. */
  verdict: string;
  /** Full assessment text; stored as the assessment's fullContent and rendered to markdown. */
  assessment: string;
  /** Roadmap deltas; completed slices may be neither modified nor removed (enforced by handleReassessRoadmap). */
  sliceChanges: {
    modified: SliceChangeInput[];
    added: SliceChangeInput[];
    removed: string[];
  };
}
|
||||
|
||||
/** Success payload of gsd_reassess_roadmap. */
export interface ReassessRoadmapResult {
  milestoneId: string;
  completedSliceId: string;
  /** Path of the rendered assessment markdown. */
  assessmentPath: string;
  /** Path of the regenerated roadmap markdown. */
  roadmapPath: string;
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function validateParams(params: ReassessRoadmapParams): ReassessRoadmapParams {
|
||||
if (!isNonEmptyString(params?.milestoneId)) throw new Error("milestoneId is required");
|
||||
if (!isNonEmptyString(params?.completedSliceId)) throw new Error("completedSliceId is required");
|
||||
if (!isNonEmptyString(params?.verdict)) throw new Error("verdict is required");
|
||||
if (!isNonEmptyString(params?.assessment)) throw new Error("assessment is required");
|
||||
|
||||
if (!params.sliceChanges || typeof params.sliceChanges !== "object") {
|
||||
throw new Error("sliceChanges must be an object");
|
||||
}
|
||||
|
||||
if (!Array.isArray(params.sliceChanges.modified)) {
|
||||
throw new Error("sliceChanges.modified must be an array");
|
||||
}
|
||||
|
||||
if (!Array.isArray(params.sliceChanges.added)) {
|
||||
throw new Error("sliceChanges.added must be an array");
|
||||
}
|
||||
|
||||
if (!Array.isArray(params.sliceChanges.removed)) {
|
||||
throw new Error("sliceChanges.removed must be an array");
|
||||
}
|
||||
|
||||
// Validate each modified slice
|
||||
for (let i = 0; i < params.sliceChanges.modified.length; i++) {
|
||||
const s = params.sliceChanges.modified[i];
|
||||
if (!s || typeof s !== "object") throw new Error(`sliceChanges.modified[${i}] must be an object`);
|
||||
if (!isNonEmptyString(s.sliceId)) throw new Error(`sliceChanges.modified[${i}].sliceId is required`);
|
||||
if (!isNonEmptyString(s.title)) throw new Error(`sliceChanges.modified[${i}].title is required`);
|
||||
}
|
||||
|
||||
// Validate each added slice
|
||||
for (let i = 0; i < params.sliceChanges.added.length; i++) {
|
||||
const s = params.sliceChanges.added[i];
|
||||
if (!s || typeof s !== "object") throw new Error(`sliceChanges.added[${i}] must be an object`);
|
||||
if (!isNonEmptyString(s.sliceId)) throw new Error(`sliceChanges.added[${i}].sliceId is required`);
|
||||
if (!isNonEmptyString(s.title)) throw new Error(`sliceChanges.added[${i}].title is required`);
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
/**
 * Handle the gsd_reassess_roadmap tool: record a post-slice assessment and
 * apply the requested roadmap deltas (modify/add/remove slices), then
 * regenerate the roadmap and assessment markdown from the DB.
 *
 * Structural enforcement: slices whose status is "complete" or "done" can be
 * neither modified nor removed.
 *
 * Never throws: failures are returned as `{ error }` objects prefixed with
 * the failing stage (validation / db write / render).
 *
 * @param rawParams Unvalidated tool input.
 * @param basePath  Workspace root used by the markdown renderers.
 */
export async function handleReassessRoadmap(
  rawParams: ReassessRoadmapParams,
  basePath: string,
): Promise<ReassessRoadmapResult | { error: string }> {
  // ── Validate ──────────────────────────────────────────────────────
  let params: ReassessRoadmapParams;
  try {
    params = validateParams(rawParams);
  } catch (err) {
    return { error: `validation failed: ${(err as Error).message}` };
  }

  // ── Verify milestone exists ───────────────────────────────────────
  const milestone = getMilestone(params.milestoneId);
  if (!milestone) {
    return { error: `milestone not found: ${params.milestoneId}` };
  }

  // ── Structural enforcement ────────────────────────────────────────
  // Both "complete" and "done" statuses count as completed work.
  const existingSlices = getMilestoneSlices(params.milestoneId);
  const completedSliceIds = new Set<string>();
  for (const slice of existingSlices) {
    if (slice.status === "complete" || slice.status === "done") {
      completedSliceIds.add(slice.id);
    }
  }

  // Reject modifications to completed slices
  for (const modifiedSlice of params.sliceChanges.modified) {
    if (completedSliceIds.has(modifiedSlice.sliceId)) {
      return { error: `cannot modify completed slice ${modifiedSlice.sliceId}` };
    }
  }

  // Reject removal of completed slices
  for (const removedId of params.sliceChanges.removed) {
    if (completedSliceIds.has(removedId)) {
      return { error: `cannot remove completed slice ${removedId}` };
    }
  }

  // ── Compute assessment artifact path ──────────────────────────────
  // Assessment lives in the completed slice's directory
  const assessmentRelPath = join(
    ".gsd", "milestones", params.milestoneId,
    "slices", params.completedSliceId,
    `${params.completedSliceId}-ASSESSMENT.md`,
  );

  // ── Transaction: DB mutations ─────────────────────────────────────
  // All deltas commit or roll back together.
  try {
    transaction(() => {
      // Record assessment
      // NOTE(review): the verdict string doubles as the assessment row's
      // status — confirm allowed values against the assessments schema.
      insertAssessment({
        path: assessmentRelPath,
        milestoneId: params.milestoneId,
        sliceId: params.completedSliceId,
        status: params.verdict,
        scope: "roadmap",
        fullContent: params.assessment,
      });

      // Apply slice modifications
      for (const mod of params.sliceChanges.modified) {
        updateSliceFields(params.milestoneId, mod.sliceId, {
          title: mod.title,
          risk: mod.risk,
          depends: mod.depends,
          demo: mod.demo,
        });
      }

      // Insert new slices
      // NOTE(review): risk/depends may be undefined here — assumes
      // insertSlice applies its own defaults; confirm against gsd-db.js.
      for (const added of params.sliceChanges.added) {
        insertSlice({
          id: added.sliceId,
          milestoneId: params.milestoneId,
          title: added.title,
          status: "pending",
          risk: added.risk,
          depends: added.depends,
          demo: added.demo ?? "",
        });
      }

      // Delete removed slices
      for (const removedId of params.sliceChanges.removed) {
        deleteSlice(params.milestoneId, removedId);
      }
    });
  } catch (err) {
    return { error: `db write failed: ${(err as Error).message}` };
  }

  // ── Render artifacts ──────────────────────────────────────────────
  try {
    const roadmapResult = await renderRoadmapFromDb(basePath, params.milestoneId);
    const assessmentResult = await renderAssessmentFromDb(basePath, params.milestoneId, params.completedSliceId, {
      verdict: params.verdict,
      assessment: params.assessment,
      completedSliceId: params.completedSliceId,
    });

    // ── Invalidate caches ─────────────────────────────────────────
    invalidateStateCache();
    clearParseCache();

    return {
      milestoneId: params.milestoneId,
      completedSliceId: params.completedSliceId,
      assessmentPath: assessmentResult.assessmentPath,
      roadmapPath: roadmapResult.roadmapPath,
    };
  } catch (err) {
    return { error: `render failed: ${(err as Error).message}` };
  }
}
|
||||
192
src/resources/extensions/gsd/tools/replan-slice.ts
Normal file
192
src/resources/extensions/gsd/tools/replan-slice.ts
Normal file
|
|
@ -0,0 +1,192 @@
|
|||
import { clearParseCache } from "../files.js";
|
||||
import {
|
||||
transaction,
|
||||
getSlice,
|
||||
getSliceTasks,
|
||||
getTask,
|
||||
insertTask,
|
||||
upsertTaskPlanning,
|
||||
insertReplanHistory,
|
||||
deleteTask,
|
||||
} from "../gsd-db.js";
|
||||
import { invalidateStateCache } from "../state.js";
|
||||
import { renderPlanFromDb, renderReplanFromDb } from "../markdown-renderer.js";
|
||||
|
||||
/** A task entry in a replan: either updates an existing task or defines a new one. */
export interface ReplanSliceTaskInput {
  taskId: string;
  title: string;
  // The fields below are defaulted ("" / []) by handleReplanSlice when falsy,
  // so only taskId and title are strictly validated.
  description: string;
  estimate: string;
  files: string[];
  verify: string;
  inputs: string[];
  expectedOutput: string[];
}
|
||||
|
||||
/** Parameters for gsd_replan_slice, invoked when a blocker forces a mid-slice replan. */
export interface ReplanSliceParams {
  milestoneId: string;
  sliceId: string;
  /** Task that hit the blocker; recorded as the replan-history taskId. */
  blockerTaskId: string;
  /** Description of the blocker; rendered into the REPLAN artifact. */
  blockerDescription: string;
  /** Summary of the plan delta; stored as the replan-history summary. */
  whatChanged: string;
  /** Tasks to upsert (existing task IDs updated, unknown IDs inserted as "pending"). */
  updatedTasks: ReplanSliceTaskInput[];
  /** Task IDs to delete; completed tasks are rejected by handleReplanSlice. */
  removedTaskIds: string[];
}
|
||||
|
||||
/** Success payload of gsd_replan_slice. */
export interface ReplanSliceResult {
  milestoneId: string;
  sliceId: string;
  /** Path of the rendered REPLAN markdown artifact. */
  replanPath: string;
  /** Path of the regenerated slice PLAN markdown. */
  planPath: string;
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.trim().length > 0;
|
||||
}
|
||||
|
||||
function validateParams(params: ReplanSliceParams): ReplanSliceParams {
|
||||
if (!isNonEmptyString(params?.milestoneId)) throw new Error("milestoneId is required");
|
||||
if (!isNonEmptyString(params?.sliceId)) throw new Error("sliceId is required");
|
||||
if (!isNonEmptyString(params?.blockerTaskId)) throw new Error("blockerTaskId is required");
|
||||
if (!isNonEmptyString(params?.blockerDescription)) throw new Error("blockerDescription is required");
|
||||
if (!isNonEmptyString(params?.whatChanged)) throw new Error("whatChanged is required");
|
||||
|
||||
if (!Array.isArray(params.updatedTasks)) {
|
||||
throw new Error("updatedTasks must be an array");
|
||||
}
|
||||
|
||||
if (!Array.isArray(params.removedTaskIds)) {
|
||||
throw new Error("removedTaskIds must be an array");
|
||||
}
|
||||
|
||||
// Validate each updated task
|
||||
for (let i = 0; i < params.updatedTasks.length; i++) {
|
||||
const t = params.updatedTasks[i];
|
||||
if (!t || typeof t !== "object") throw new Error(`updatedTasks[${i}] must be an object`);
|
||||
if (!isNonEmptyString(t.taskId)) throw new Error(`updatedTasks[${i}].taskId is required`);
|
||||
if (!isNonEmptyString(t.title)) throw new Error(`updatedTasks[${i}].title is required`);
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
export async function handleReplanSlice(
|
||||
rawParams: ReplanSliceParams,
|
||||
basePath: string,
|
||||
): Promise<ReplanSliceResult | { error: string }> {
|
||||
// ── Validate ──────────────────────────────────────────────────────
|
||||
let params: ReplanSliceParams;
|
||||
try {
|
||||
params = validateParams(rawParams);
|
||||
} catch (err) {
|
||||
return { error: `validation failed: ${(err as Error).message}` };
|
||||
}
|
||||
|
||||
// ── Verify parent slice exists ────────────────────────────────────
|
||||
const parentSlice = getSlice(params.milestoneId, params.sliceId);
|
||||
if (!parentSlice) {
|
||||
return { error: `missing parent slice: ${params.milestoneId}/${params.sliceId}` };
|
||||
}
|
||||
|
||||
// ── Structural enforcement ────────────────────────────────────────
|
||||
const existingTasks = getSliceTasks(params.milestoneId, params.sliceId);
|
||||
const completedTaskIds = new Set<string>();
|
||||
for (const task of existingTasks) {
|
||||
if (task.status === "complete" || task.status === "done") {
|
||||
completedTaskIds.add(task.id);
|
||||
}
|
||||
}
|
||||
|
||||
// Reject updates to completed tasks
|
||||
for (const updatedTask of params.updatedTasks) {
|
||||
if (completedTaskIds.has(updatedTask.taskId)) {
|
||||
return { error: `cannot modify completed task ${updatedTask.taskId}` };
|
||||
}
|
||||
}
|
||||
|
||||
// Reject removal of completed tasks
|
||||
for (const removedId of params.removedTaskIds) {
|
||||
if (completedTaskIds.has(removedId)) {
|
||||
return { error: `cannot remove completed task ${removedId}` };
|
||||
}
|
||||
}
|
||||
|
||||
// ── Transaction: DB mutations ─────────────────────────────────────
|
||||
const existingTaskIds = new Set(existingTasks.map((t) => t.id));
|
||||
|
||||
try {
|
||||
transaction(() => {
|
||||
// Record replan history
|
||||
insertReplanHistory({
|
||||
milestoneId: params.milestoneId,
|
||||
sliceId: params.sliceId,
|
||||
taskId: params.blockerTaskId,
|
||||
summary: params.whatChanged,
|
||||
});
|
||||
|
||||
// Apply task updates (upsert existing, insert new)
|
||||
for (const updatedTask of params.updatedTasks) {
|
||||
if (existingTaskIds.has(updatedTask.taskId)) {
|
||||
// Update existing task's planning fields
|
||||
upsertTaskPlanning(params.milestoneId, params.sliceId, updatedTask.taskId, {
|
||||
title: updatedTask.title,
|
||||
description: updatedTask.description || "",
|
||||
estimate: updatedTask.estimate || "",
|
||||
files: updatedTask.files || [],
|
||||
verify: updatedTask.verify || "",
|
||||
inputs: updatedTask.inputs || [],
|
||||
expectedOutput: updatedTask.expectedOutput || [],
|
||||
});
|
||||
} else {
|
||||
// Insert new task then set planning fields
|
||||
insertTask({
|
||||
id: updatedTask.taskId,
|
||||
sliceId: params.sliceId,
|
||||
milestoneId: params.milestoneId,
|
||||
title: updatedTask.title,
|
||||
status: "pending",
|
||||
});
|
||||
upsertTaskPlanning(params.milestoneId, params.sliceId, updatedTask.taskId, {
|
||||
title: updatedTask.title,
|
||||
description: updatedTask.description || "",
|
||||
estimate: updatedTask.estimate || "",
|
||||
files: updatedTask.files || [],
|
||||
verify: updatedTask.verify || "",
|
||||
inputs: updatedTask.inputs || [],
|
||||
expectedOutput: updatedTask.expectedOutput || [],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Delete removed tasks
|
||||
for (const removedId of params.removedTaskIds) {
|
||||
deleteTask(params.milestoneId, params.sliceId, removedId);
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
return { error: `db write failed: ${(err as Error).message}` };
|
||||
}
|
||||
|
||||
// ── Render artifacts ──────────────────────────────────────────────
|
||||
try {
|
||||
const renderResult = await renderPlanFromDb(basePath, params.milestoneId, params.sliceId);
|
||||
const replanResult = await renderReplanFromDb(basePath, params.milestoneId, params.sliceId, {
|
||||
blockerTaskId: params.blockerTaskId,
|
||||
blockerDescription: params.blockerDescription,
|
||||
whatChanged: params.whatChanged,
|
||||
});
|
||||
|
||||
// ── Invalidate caches ─────────────────────────────────────────
|
||||
invalidateStateCache();
|
||||
clearParseCache();
|
||||
|
||||
return {
|
||||
milestoneId: params.milestoneId,
|
||||
sliceId: params.sliceId,
|
||||
replanPath: replanResult.replanPath,
|
||||
planPath: renderResult.planPath,
|
||||
};
|
||||
} catch (err) {
|
||||
return { error: `render failed: ${(err as Error).message}` };
|
||||
}
|
||||
}
|
||||
|
|
@ -12,6 +12,7 @@
|
|||
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { createRequire } from "node:module";
|
||||
import { gsdRoot, milestonesDir } from "./paths.js";
|
||||
import { MILESTONE_ID_RE } from "./milestone-ids.js";
|
||||
import type { Classification, CaptureEntry } from "./captures.js";
|
||||
|
|
@ -90,19 +91,37 @@ export function executeReplan(
|
|||
const triggerPath = join(
|
||||
basePath, ".gsd", "milestones", mid, "slices", sid, `${sid}-REPLAN-TRIGGER.md`,
|
||||
);
|
||||
const ts = new Date().toISOString();
|
||||
const content = [
|
||||
`# Replan Trigger`,
|
||||
``,
|
||||
`**Source:** Capture ${capture.id}`,
|
||||
`**Capture:** ${capture.text}`,
|
||||
`**Rationale:** ${capture.rationale ?? "User-initiated replan via capture triage"}`,
|
||||
`**Triggered:** ${new Date().toISOString()}`,
|
||||
`**Triggered:** ${ts}`,
|
||||
``,
|
||||
`This file was created by the triage pipeline. The next dispatch cycle`,
|
||||
`will detect it and enter the replanning-slice phase.`,
|
||||
].join("\n");
|
||||
|
||||
writeFileSync(triggerPath, content, "utf-8");
|
||||
|
||||
// Also write replan_triggered_at column for DB-backed detection
|
||||
try {
|
||||
const req = createRequire(import.meta.url);
|
||||
const { isDbAvailable, _getAdapter } = req("./gsd-db.js");
|
||||
if (isDbAvailable()) {
|
||||
const adapter = _getAdapter();
|
||||
if (adapter) {
|
||||
adapter.prepare(
|
||||
"UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
|
||||
).run({ ":ts": ts, ":mid": mid, ":sid": sid });
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// DB write is best-effort — disk file is the primary trigger for fallback path
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
|
|
|
|||
|
|
@ -3,7 +3,8 @@
|
|||
import { existsSync, readFileSync, statSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { deriveState } from './state.js';
|
||||
import { parseRoadmap, parsePlan, parseSummary, loadFile } from './files.js';
|
||||
import { parseSummary, loadFile } from './files.js';
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from './gsd-db.js';
|
||||
import { findMilestoneIds } from './milestone-ids.js';
|
||||
import { resolveMilestoneFile, resolveSliceFile, resolveGsdRootFile, gsdRoot } from './paths.js';
|
||||
import {
|
||||
|
|
@ -796,10 +797,17 @@ export async function loadVisualizerData(basePath: string): Promise<VisualizerDa
|
|||
const roadmapFile = resolveMilestoneFile(basePath, mid, 'ROADMAP');
|
||||
const roadmapContent = roadmapFile ? readFileCached(roadmapFile) : null;
|
||||
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
if (roadmapContent || isDbAvailable()) {
|
||||
// Normalize slices from DB
|
||||
type NormSlice = { id: string; done: boolean; title: string; risk: string; depends: string[]; demo: string };
|
||||
let normSlices: NormSlice[];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(mid).map(s => ({ id: s.id, done: s.status === 'complete', title: s.title, risk: s.risk || 'medium', depends: s.depends, demo: s.demo }));
|
||||
} else {
|
||||
normSlices = [];
|
||||
}
|
||||
|
||||
for (const s of roadmap.slices) {
|
||||
for (const s of normSlices) {
|
||||
const isActiveSlice =
|
||||
state.activeMilestone?.id === mid &&
|
||||
state.activeSlice?.id === s.id;
|
||||
|
|
@ -807,16 +815,13 @@ export async function loadVisualizerData(basePath: string): Promise<VisualizerDa
|
|||
const tasks: VisualizerTask[] = [];
|
||||
|
||||
if (isActiveSlice) {
|
||||
const planFile = resolveSliceFile(basePath, mid, s.id, 'PLAN');
|
||||
const planContent = planFile ? readFileCached(planFile) : null;
|
||||
|
||||
if (planContent) {
|
||||
const plan = parsePlan(planContent);
|
||||
for (const t of plan.tasks) {
|
||||
// Normalize tasks from DB
|
||||
if (isDbAvailable()) {
|
||||
for (const t of getSliceTasks(mid, s.id)) {
|
||||
tasks.push({
|
||||
id: t.id,
|
||||
title: t.title,
|
||||
done: t.done,
|
||||
done: t.status === 'complete' || t.status === 'done',
|
||||
active: state.activeTask?.id === t.id,
|
||||
estimate: t.estimate || undefined,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { join } from "node:path";
|
||||
|
||||
import { loadFile, parsePlan, parseRoadmap } from "./files.js";
|
||||
import { loadFile } from "./files.js";
|
||||
import { isDbAvailable, getMilestoneSlices, getSliceTasks } from "./gsd-db.js";
|
||||
import {
|
||||
resolveMilestoneFile,
|
||||
resolveSliceFile,
|
||||
|
|
@ -11,7 +12,6 @@ import {
|
|||
import { deriveState } from "./state.js";
|
||||
import { milestoneIdSort, findMilestoneIds } from "./guided-flow.js";
|
||||
import type { RiskLevel } from "./types.js";
|
||||
import { type ValidationIssue, validateCompleteBoundary, validatePlanBoundary } from "./observability-validator.js";
|
||||
import { getSliceBranchName, detectWorktreeName } from "./worktree.js";
|
||||
|
||||
export interface WorkspaceTaskTarget {
|
||||
|
|
@ -59,13 +59,15 @@ export interface GSDWorkspaceIndex {
|
|||
phase: string;
|
||||
};
|
||||
scopes: WorkspaceScopeTarget[];
|
||||
validationIssues: ValidationIssue[];
|
||||
validationIssues: Array<Record<string, unknown>>;
|
||||
}
|
||||
|
||||
|
||||
// Extract milestone title from roadmap header without using parsers.
|
||||
// Falls back to the milestone ID if no title line found.
|
||||
function titleFromRoadmapHeader(content: string, fallbackId: string): string {
|
||||
const roadmap = parseRoadmap(content);
|
||||
return roadmap.title.replace(/^M\d+(?:-[a-z0-9]{6})?[^:]*:\s*/, "") || fallbackId;
|
||||
// Parse the "# M001: Title" header directly
|
||||
const match = content.match(/^#\s+M\d+(?:-[a-z0-9]{6})?[^:]*:\s*(.+)/m);
|
||||
return match?.[1]?.trim() || fallbackId;
|
||||
}
|
||||
|
||||
async function indexSlice(basePath: string, milestoneId: string, sliceId: string, fallbackTitle: string, done: boolean, roadmapMeta?: { risk?: RiskLevel; depends?: string[]; demo?: string }): Promise<WorkspaceSliceTarget> {
|
||||
|
|
@ -77,22 +79,21 @@ async function indexSlice(basePath: string, milestoneId: string, sliceId: string
|
|||
const tasks: WorkspaceTaskTarget[] = [];
|
||||
let title = fallbackTitle;
|
||||
|
||||
if (planPath) {
|
||||
const content = await loadFile(planPath);
|
||||
if (content) {
|
||||
const plan = parsePlan(content);
|
||||
title = plan.title || fallbackTitle;
|
||||
for (const task of plan.tasks) {
|
||||
tasks.push({
|
||||
id: task.id,
|
||||
title: task.title,
|
||||
done: task.done,
|
||||
planPath: resolveTaskFile(basePath, milestoneId, sliceId, task.id, "PLAN") ?? undefined,
|
||||
summaryPath: resolveTaskFile(basePath, milestoneId, sliceId, task.id, "SUMMARY") ?? undefined,
|
||||
});
|
||||
}
|
||||
// Prefer DB for task data
|
||||
if (isDbAvailable()) {
|
||||
const dbTasks = getSliceTasks(milestoneId, sliceId);
|
||||
for (const task of dbTasks) {
|
||||
title = fallbackTitle; // title comes from slice-level data, not plan
|
||||
tasks.push({
|
||||
id: task.id,
|
||||
title: task.title,
|
||||
done: task.status === "complete" || task.status === "done",
|
||||
planPath: resolveTaskFile(basePath, milestoneId, sliceId, task.id, "PLAN") ?? undefined,
|
||||
summaryPath: resolveTaskFile(basePath, milestoneId, sliceId, task.id, "SUMMARY") ?? undefined,
|
||||
});
|
||||
}
|
||||
}
|
||||
// When DB unavailable, tasks stays empty
|
||||
|
||||
return {
|
||||
id: sliceId,
|
||||
|
|
@ -111,53 +112,41 @@ async function indexSlice(basePath: string, milestoneId: string, sliceId: string
|
|||
}
|
||||
|
||||
export interface IndexWorkspaceOptions {
|
||||
/**
|
||||
* When true, run validatePlanBoundary and validateCompleteBoundary for each slice.
|
||||
* Skipped by default — validation is expensive (content analysis) and only needed
|
||||
* for explicit doctor/audit flows. The /gsd status dashboard and scope pickers
|
||||
* don't need the full issue list.
|
||||
*/
|
||||
validate?: boolean;
|
||||
}
|
||||
|
||||
export async function indexWorkspace(basePath: string, opts: IndexWorkspaceOptions = {}): Promise<GSDWorkspaceIndex> {
|
||||
const milestoneIds = findMilestoneIds(basePath);
|
||||
const milestones: WorkspaceMilestoneTarget[] = [];
|
||||
const validationIssues: ValidationIssue[] = [];
|
||||
const runValidation = opts.validate === true;
|
||||
|
||||
for (const milestoneId of milestoneIds) {
|
||||
const roadmapPath = resolveMilestoneFile(basePath, milestoneId, "ROADMAP") ?? undefined;
|
||||
let title = milestoneId;
|
||||
const slices: WorkspaceSliceTarget[] = [];
|
||||
|
||||
if (roadmapPath) {
|
||||
const roadmapContent = await loadFile(roadmapPath);
|
||||
if (roadmapContent) {
|
||||
const roadmap = parseRoadmap(roadmapContent);
|
||||
title = titleFromRoadmapHeader(roadmapContent, milestoneId);
|
||||
if (roadmapPath || isDbAvailable()) {
|
||||
// Normalize slices from DB
|
||||
type NormSlice = { id: string; done: boolean; title: string; risk: string; depends: string[]; demo: string };
|
||||
let normSlices: NormSlice[];
|
||||
if (isDbAvailable()) {
|
||||
normSlices = getMilestoneSlices(milestoneId).map(s => ({ id: s.id, done: s.status === "complete", title: s.title, risk: s.risk || "medium", depends: s.depends, demo: s.demo }));
|
||||
// Get title from roadmap header
|
||||
if (roadmapPath) {
|
||||
const roadmapContent = await loadFile(roadmapPath);
|
||||
if (roadmapContent) title = titleFromRoadmapHeader(roadmapContent, milestoneId);
|
||||
}
|
||||
} else {
|
||||
normSlices = [];
|
||||
}
|
||||
|
||||
// Parallelise all per-slice I/O: indexSlice + (optional) validation calls run concurrently.
|
||||
// Order is preserved via Promise.all on an array built from roadmap.slices.
|
||||
if (normSlices!.length > 0) {
|
||||
const sliceResults = await Promise.all(
|
||||
roadmap.slices.map(async (slice) => {
|
||||
if (runValidation) {
|
||||
const [indexedSlice, planIssues, completeIssues] = await Promise.all([
|
||||
indexSlice(basePath, milestoneId, slice.id, slice.title, slice.done, { risk: slice.risk, depends: slice.depends, demo: slice.demo }),
|
||||
validatePlanBoundary(basePath, milestoneId, slice.id),
|
||||
validateCompleteBoundary(basePath, milestoneId, slice.id),
|
||||
]);
|
||||
return { indexedSlice, issues: [...planIssues, ...completeIssues] };
|
||||
}
|
||||
const indexedSlice = await indexSlice(basePath, milestoneId, slice.id, slice.title, slice.done, { risk: slice.risk, depends: slice.depends, demo: slice.demo });
|
||||
return { indexedSlice, issues: [] as ValidationIssue[] };
|
||||
normSlices!.map(async (slice) => {
|
||||
return indexSlice(basePath, milestoneId, slice.id, slice.title, slice.done, { risk: slice.risk as RiskLevel, depends: slice.depends, demo: slice.demo });
|
||||
}),
|
||||
);
|
||||
|
||||
for (const { indexedSlice, issues } of sliceResults) {
|
||||
slices.push(indexedSlice);
|
||||
validationIssues.push(...issues);
|
||||
}
|
||||
slices.push(...sliceResults);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -187,7 +176,7 @@ export async function indexWorkspace(basePath: string, opts: IndexWorkspaceOptio
|
|||
}
|
||||
}
|
||||
|
||||
return { milestones, active, scopes, validationIssues };
|
||||
return { milestones, active, scopes, validationIssues: [] };
|
||||
}
|
||||
|
||||
export async function listDoctorScopeSuggestions(basePath: string): Promise<Array<{ value: string; label: string }>> {
|
||||
|
|
@ -207,8 +196,7 @@ export async function listDoctorScopeSuggestions(basePath: string): Promise<Arra
|
|||
}
|
||||
|
||||
export async function getSuggestedNextCommands(basePath: string): Promise<string[]> {
|
||||
// Run validation here since we surface a /gsd doctor audit hint when issues exist.
|
||||
const index = await indexWorkspace(basePath, { validate: true });
|
||||
const index = await indexWorkspace(basePath);
|
||||
const scope = index.active.milestoneId && index.active.sliceId
|
||||
? `${index.active.milestoneId}/${index.active.sliceId}`
|
||||
: index.active.milestoneId;
|
||||
|
|
@ -218,7 +206,6 @@ export async function getSuggestedNextCommands(basePath: string): Promise<string
|
|||
if (index.active.phase === "executing" || index.active.phase === "summarizing") commands.add("/gsd auto");
|
||||
if (scope) commands.add(`/gsd doctor ${scope}`);
|
||||
if (scope) commands.add(`/gsd doctor fix ${scope}`);
|
||||
if (index.validationIssues.length > 0 && scope) commands.add(`/gsd doctor audit ${scope}`);
|
||||
commands.add("/gsd status");
|
||||
return [...commands];
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue