feat(sf): port schemas/parsers+validate, fix project-research-policy stubs + sweeps

- schemas/parsers.ts: new — Markdown→structured object parsers (ParsedProject,
  ParsedRequirements, ParsedRequirement, ParsedRoadmap, parseProject,
  parseRequirements, parseRoadmap, parseRoadmapMilestone)
- schemas/validate.ts: new — artifact validation against parsed schemas
  (validateProject, validateRequirements, validateArtifact)
- project-research-policy.ts: remove throw stubs, wire real parseProject/
  parseRequirements from schemas/parsers — classifyProjectResearchScope now live
- verification-gate.ts: escalation-policy backoff improvements
- workflow-events.ts + workflow-logger.ts: minor type/log additions
- worktree-health.ts: health check timing
- doctor-runtime-checks.ts: expand checks
- tests/escalation-policy.test.ts: new test for gate escalation

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Mikael Hugo 2026-05-02 02:30:57 +02:00
parent 98da1980fb
commit f761d31d1c
9 changed files with 961 additions and 7 deletions

View file

@ -700,6 +700,26 @@ export async function checkRuntimeHealth(
// Non-fatal — snapshot ref check failed
}
// ── Unified audit projection health ───────────────────────────────────
// If emitUokAuditEvent has failed since the last reset, the unified audit
// log has diverged from the workflow-logger buffer. Surface the count so
// operators can investigate without relying on transient stderr lines.
try {
const auditFailures = getAuditEmitFailureCount();
if (auditFailures > 0) {
issues.push({
severity: "warning",
code: "audit_emit_failure",
scope: "project",
unitId: "project",
message: `Unified audit projection diverged: ${auditFailures} emitUokAuditEvent failure(s) since last logger reset. Check stderr logs for details; the audit log may be incomplete for the current session.`,
fixable: false,
});
}
} catch {
// Non-fatal — audit emit failure check failed
}
// ── Scaffold freshness (ADR-021 Phase C) ──────────────────────────────
// Visibility into scaffold drift. Phase C runs the silent path
// automatically on every SF startup, but the doctor finding lets users

View file

@ -0,0 +1,346 @@
// sf-2 / Deep planning mode — Markdown → structured object parsers for artifact validation.
//
// Each parser converts a markdown artifact into a typed object suitable for
// JSON Schema validation. The parsers are intentionally minimal — they only
// extract the structure the validators care about, not full semantic content.
// ─── Parsed artifact shapes ─────────────────────────────────────────────
// Output types for the parsers below. Validators consume these instead of
// re-scanning the raw markdown.

/** Structured view of PROJECT.md produced by `parseProject`. */
export interface ParsedProject {
  /** H2 section name → trimmed section body. */
  sections: Record<string, string>;
  /** Names of H2 sections in the order they appear */
  sectionOrder: string[];
  /** Entries of the "Milestone Sequence" checklist ("- [x] M001: Title — one-liner"). */
  milestones: Array<{ id: string; title: string; oneLiner: string; done: boolean }>;
  /** True if any section body contains an unsubstituted {{...}} template token */
  hasTemplateTokens: boolean;
  /** Section names whose bodies contain template tokens */
  sectionsWithTokens: string[];
}

/** One "### R### — Title" entry parsed out of REQUIREMENTS.md. */
export interface ParsedRequirement {
  /** Requirement ID, e.g. "R001". */
  id: string;
  title: string;
  // Each field below comes from a "- Key: value" bullet in the entry body
  // and is "" when the bullet is absent (see parseRequirementEntry).
  class: string;
  status: string;
  description: string;
  whyItMatters: string;
  source: string;
  primaryOwner: string;
  supportingSlices: string;
  validation: string;
  notes: string;
  /** The H2 section this entry was found under */
  parentSection: string;
}

/** Structured view of REQUIREMENTS.md produced by `parseRequirements`. */
export interface ParsedRequirements {
  sections: Record<string, string>;
  sectionOrder: string[];
  requirements: ParsedRequirement[];
  /** Parsed traceability table rows */
  traceabilityRows: Array<Record<string, string>>;
  /** Parsed coverage summary key/value lines */
  coverageSummary: Record<string, string>;
  hasTemplateTokens: boolean;
}

/** One slice ("S##") entry of a milestone roadmap. */
export interface ParsedRoadmapSlice {
  id: string;
  title: string;
  risk: string;
  /** Slice IDs this slice depends on (well-formed S## tokens only). */
  depends: string[];
  demo: string;
}

/** Structured view of a per-milestone ROADMAP.md produced by `parseRoadmap`. */
export interface ParsedRoadmap {
  sections: Record<string, string>;
  sectionOrder: string[];
  slices: ParsedRoadmapSlice[];
  /** Bullet items under "## Definition of Done". */
  definitionOfDone: string[];
  hasTemplateTokens: boolean;
  /**
   * Tokens in a slice's "Depends" field that did not match S\d{2}. Surfaced
   * by the validator as a "malformed-depends" warning so the user sees the
   * typo instead of having it silently dropped from the dependency graph.
   */
  malformedDepends: Array<{ sliceId: string; values: string[] }>;
}
// ─── Shared regexes ─────────────────────────────────────────────────────
// The g-flag regexes are only ever used with matchAll (which clones the
// regex), so sharing them at module scope is lastIndex-safe.

/** An unsubstituted {{...}} template placeholder anywhere in a body. */
const TEMPLATE_TOKEN_RE = /\{\{[^}]+\}\}/;
/** H2 header line ("## Title"), capturing the section title. */
const H2_RE = /^##\s+(.+)$/gm;
/** H3 header line ("### Title"), capturing the title. */
const H3_RE = /^###\s+(.+)$/gm;
/** Milestone checklist line: "- [x] M001: Title — one-liner" (—, -- or - separator). */
const MILESTONE_LINE_RE = /^-\s+\[([ x])\]\s+(M\d{3}):\s+(.+?)\s+(?:—|--|-)\s+(.+)$/gm;
/** Slice H3 header: "### S01 — Title" (—, -- or - separator). */
const SLICE_HEADER_RE = /^###\s+(S\d{2})\s*(?:—|--|-)\s+(.+)$/m;
/** Requirement H3 header: "### R001 — Title" (—, -- or - separator). */
const REQUIREMENT_HEADER_RE = /^###\s+(R\d{3})\s*(?:—|--|-)\s+(.+)$/m;
/**
 * Split markdown into its H2 ("## Title") sections.
 *
 * Returns the trimmed body text keyed by section title plus the titles in
 * document order. A duplicate H2 title keeps only the last body in
 * `sections`, but appears twice in `order`.
 */
function splitH2Sections(content: string): { sections: Record<string, string>; order: string[] } {
  const headers: Array<{ name: string; headerStart: number; bodyStart: number }> = [];
  for (const match of content.matchAll(/^##\s+(.+)$/gm)) {
    if (match.index === undefined) continue;
    headers.push({
      name: match[1].trim(),
      headerStart: match.index,
      bodyStart: match.index + match[0].length,
    });
  }
  const sections: Record<string, string> = {};
  const order: string[] = [];
  headers.forEach((header, position) => {
    const next = headers[position + 1];
    const bodyEnd = next ? next.headerStart : content.length;
    sections[header.name] = content.slice(header.bodyStart, bodyEnd).trim();
    order.push(header.name);
  });
  return { sections, order };
}
/**
 * Scan section bodies for unsubstituted {{...}} template tokens.
 * Returns whether any were found plus the offending section names in
 * insertion order.
 */
function detectTemplateTokens(sections: Record<string, string>): { has: boolean; flagged: string[] } {
  const flagged = Object.entries(sections)
    .filter(([, body]) => /\{\{[^}]+\}\}/.test(body))
    .map(([name]) => name);
  return { has: flagged.length > 0, flagged };
}
/**
 * Parse PROJECT.md into its H2 sections plus the "Milestone Sequence"
 * checklist entries, flagging any sections that still contain
 * unsubstituted {{...}} template tokens.
 */
export function parseProject(content: string): ParsedProject {
  const { sections, order } = splitH2Sections(content);
  const tokenScan = detectTemplateTokens(sections);
  const sequenceBody = sections["Milestone Sequence"] ?? "";
  const milestones: ParsedProject["milestones"] = [...sequenceBody.matchAll(MILESTONE_LINE_RE)].map(
    match => ({
      done: match[1] === "x",
      id: match[2],
      title: match[3].trim(),
      oneLiner: match[4].trim(),
    }),
  );
  return {
    sections,
    sectionOrder: order,
    milestones,
    hasTemplateTokens: tokenScan.has,
    sectionsWithTokens: tokenScan.flagged,
  };
}
/**
 * Parse one H3 requirement block ("### R001 — Title" followed by
 * "- Key: value" bullet lines). Returns null when the block has no
 * recognizable requirement header; absent bullets yield "".
 */
function parseRequirementEntry(block: string, parentSection: string): ParsedRequirement | null {
  const header = block.match(/^###\s+(R\d{3})\s*(?:—|--|-)\s+(.+)$/m);
  if (header === null) return null;
  // Extract the value of a "- Key: value" bullet; "" when the key is absent.
  const fieldOf = (key: string): string => {
    const hit = block.match(new RegExp(`^-\\s+${key}:\\s*(.*)$`, "m"));
    return hit === null ? "" : hit[1].trim();
  };
  return {
    id: header[1],
    title: header[2].trim(),
    class: fieldOf("Class"),
    status: fieldOf("Status"),
    description: fieldOf("Description"),
    whyItMatters: fieldOf("Why it matters"),
    source: fieldOf("Source"),
    primaryOwner: fieldOf("Primary owning slice"),
    supportingSlices: fieldOf("Supporting slices"),
    validation: fieldOf("Validation"),
    notes: fieldOf("Notes"),
    parentSection,
  };
}
/**
 * Split a section body into H3-delimited blocks: each block runs from a
 * "### ..." header up to the next H3 header (or the end of the body).
 * Returns [] for an empty body or one with no H3 headers at all.
 */
function splitH3Blocks(sectionBody: string): string[] {
  if (!sectionBody) return [];
  const starts = [...sectionBody.matchAll(/^###\s+(.+)$/gm)]
    .map(match => match.index)
    .filter((idx): idx is number => idx !== undefined);
  return starts.map((start, position) => {
    const end = position + 1 < starts.length ? starts[position + 1] : sectionBody.length;
    return sectionBody.slice(start, end);
  });
}
/** Parse the markdown table under "## Traceability" into header-keyed rows.
 * Rows whose cell count differs from the header are skipped; the row right
 * after the header (the |---| separator) is never treated as data. */
function parseTraceabilityTable(body: string): Array<Record<string, string>> {
  const rows: Array<Record<string, string>> = [];
  const lines = body.split("\n").map(line => line.trim()).filter(Boolean);
  if (lines.length < 2 || !lines[0].startsWith("|") || !lines[1].startsWith("|")) return rows;
  const splitRow = (line: string): string[] =>
    line.replace(/^\|/, "").replace(/\|$/, "").split("|").map(cell => cell.trim());
  const headers = splitRow(lines[0]);
  for (const line of lines.slice(2)) {
    if (!line.startsWith("|")) continue;
    const cells = splitRow(line);
    if (cells.length !== headers.length) continue;
    const row: Record<string, string> = {};
    headers.forEach((header, idx) => { row[header] = cells[idx]; });
    rows.push(row);
  }
  return rows;
}

/** Parse "- Key: value" bullet lines under "## Coverage Summary". */
function parseCoverageSummary(body: string): Record<string, string> {
  const summary: Record<string, string> = {};
  for (const line of body.split("\n")) {
    const hit = line.match(/^-\s+(.+?):\s*(.+)$/);
    if (hit !== null) summary[hit[1].trim()] = hit[2].trim();
  }
  return summary;
}

/**
 * Parse REQUIREMENTS.md: requirement entries from the four lifecycle
 * sections (Active, Validated, Deferred, Out of Scope), the Traceability
 * table, and the Coverage Summary key/value bullets.
 */
export function parseRequirements(content: string): ParsedRequirements {
  const { sections, order } = splitH2Sections(content);
  const tokenScan = detectTemplateTokens(sections);
  const requirements: ParsedRequirement[] = [];
  for (const sectionName of ["Active", "Validated", "Deferred", "Out of Scope"]) {
    for (const block of splitH3Blocks(sections[sectionName] ?? "")) {
      const entry = parseRequirementEntry(block, sectionName);
      if (entry !== null) requirements.push(entry);
    }
  }
  return {
    sections,
    sectionOrder: order,
    requirements,
    traceabilityRows: parseTraceabilityTable(sections["Traceability"] ?? ""),
    coverageSummary: parseCoverageSummary(sections["Coverage Summary"] ?? ""),
    hasTemplateTokens: tokenScan.has,
  };
}
/**
 * Parse a "Depends" cell (e.g. "S01, S02" or "none" or "—") into the list
 * of well-formed slice IDs plus any tokens that did not match S\d{2}.
 * Shared by the H3-format and Slice-Overview-table parsing paths so both
 * surface the same malformed-depends diagnostics.
 */
function parseDependsCell(raw: string): { ids: string[]; malformed: string[] } {
  const cell = raw.trim();
  // Empty cells and the documented "no dependencies" sentinels mean "none".
  const isEmptySentinel =
    cell === "" || cell === "—" || cell === "-" || cell.toLowerCase() === "none";
  if (isEmptySentinel) return { ids: [], malformed: [] };
  const tokens = cell.split(/[,\s]+/).filter(Boolean);
  const looksLikeSlice = (tok: string): boolean => /^S\d{2}$/.test(tok);
  return {
    ids: tokens.filter(looksLikeSlice),
    malformed: tokens.filter(tok => !looksLikeSlice(tok)),
  };
}
/**
 * Parse the "Slice Overview" table format emitted by `renderRoadmapContent`
 * in workflow-projections.ts. Columns are: ID | Slice | Risk | Depends |
 * Done | After this. Returns empty results when no recognizable table is
 * present or the ID/Slice columns cannot be located.
 */
function parseSliceOverviewTable(body: string): {
  slices: ParsedRoadmapSlice[];
  malformedDepends: Array<{ sliceId: string; values: string[] }>;
} {
  const slices: ParsedRoadmapSlice[] = [];
  const malformedDepends: Array<{ sliceId: string; values: string[] }> = [];
  const rows = body.split("\n").map(line => line.trim()).filter(Boolean);
  const splitCells = (row: string): string[] =>
    row.replace(/^\|/, "").replace(/\|$/, "").split("|").map(cell => cell.trim());
  // Header row: the first pipe-delimited line mentioning an "ID" column.
  const headerIdx = rows.findIndex(row => row.startsWith("|") && /\bID\b/i.test(row));
  if (headerIdx === -1) return { slices, malformedDepends };
  const headers = splitCells(rows[headerIdx]).map(cell => cell.toLowerCase());
  const idCol = headers.indexOf("id");
  const sliceCol = headers.indexOf("slice");
  const riskCol = headers.indexOf("risk");
  const dependsCol = headers.indexOf("depends");
  // "After this" is the demo/outcome column. Some templates may use "demo" instead.
  const afterCol = headers.indexOf("after this");
  const demoCol = afterCol >= 0 ? afterCol : headers.indexOf("demo");
  if (idCol === -1 || sliceCol === -1) return { slices, malformedDepends };
  // headerIdx + 1 is the |---| separator; data rows run from there until
  // the first non-pipe line. Short rows are skipped, not reported.
  for (const row of rows.slice(headerIdx + 2)) {
    if (!row.startsWith("|")) break;
    const cells = splitCells(row);
    if (cells.length < headers.length) continue;
    const sliceId = cells[idCol];
    if (!/^S\d{2}$/.test(sliceId)) continue;
    const depends = parseDependsCell(dependsCol === -1 ? "" : cells[dependsCol]);
    if (depends.malformed.length > 0) {
      malformedDepends.push({ sliceId, values: depends.malformed });
    }
    slices.push({
      id: sliceId,
      title: cells[sliceCol] ?? "",
      risk: riskCol === -1 ? "" : cells[riskCol],
      depends: depends.ids,
      demo: demoCol === -1 ? "" : cells[demoCol],
    });
  }
  return { slices, malformedDepends };
}
/**
 * Parse a per-milestone ROADMAP.md. Two slice layouts are accepted:
 *   A. legacy "## Slices" with one H3 block per slice (fixtures + some
 *      templates), and
 *   B. the "## Slice Overview" table emitted by workflow-projections
 *      (sf_plan_milestone) — used only when layout A produced no slices,
 *      so a roadmap containing both sections is parsed exactly once.
 */
export function parseRoadmap(content: string): ParsedRoadmap {
  const { sections, order } = splitH2Sections(content);
  const tokenScan = detectTemplateTokens(sections);
  const slices: ParsedRoadmapSlice[] = [];
  const malformedDepends: Array<{ sliceId: string; values: string[] }> = [];
  // Layout A: H3 blocks under "## Slices".
  for (const block of splitH3Blocks(sections["Slices"] ?? "")) {
    const header = block.match(SLICE_HEADER_RE);
    if (header === null) continue;
    const sliceId = header[1];
    // "- Key: value" bullet extractor scoped to this slice block.
    const fieldOf = (key: string): string => {
      const hit = block.match(new RegExp(`^-\\s+${key}:\\s*(.*)$`, "m"));
      return hit === null ? "" : hit[1].trim();
    };
    const depends = parseDependsCell(fieldOf("Depends"));
    if (depends.malformed.length > 0) {
      malformedDepends.push({ sliceId, values: depends.malformed });
    }
    slices.push({
      id: sliceId,
      title: header[2].trim(),
      risk: fieldOf("Risk"),
      depends: depends.ids,
      demo: fieldOf("Demo"),
    });
  }
  // Layout B fallback: "## Slice Overview" table.
  if (slices.length === 0) {
    const overviewBody = sections["Slice Overview"] ?? "";
    if (overviewBody) {
      const table = parseSliceOverviewTable(overviewBody);
      slices.push(...table.slices);
      malformedDepends.push(...table.malformedDepends);
    }
  }
  // "## Definition of Done": one entry per "- ..." bullet line.
  const definitionOfDone: string[] = [];
  for (const line of (sections["Definition of Done"] ?? "").split("\n")) {
    const hit = line.match(/^-\s+(.+)$/);
    if (hit !== null) definitionOfDone.push(hit[1].trim());
  }
  return {
    sections,
    sectionOrder: order,
    slices,
    definitionOfDone,
    hasTemplateTokens: tokenScan.has,
    malformedDepends,
  };
}

View file

@ -0,0 +1,452 @@
// sf-2 / Deep planning mode — Artifact validator entry point.
//
// Validates PROJECT.md, REQUIREMENTS.md, and per-milestone ROADMAP.md
// against the contract spec in .planning/phases/11-deep-planning-mode/11-CONTRACTS.md.
// Used by deep-mode dispatch rules to gate stage completion and by light mode
// auto-start to catch malformed artifacts early.
import { existsSync, readFileSync } from "node:fs";
import { parseProject, parseRequirements, parseRoadmap } from "./parsers.js";
import type { ParsedRequirement } from "./parsers.js";
/** Artifact families this validator knows how to check. */
export type ArtifactKind = "project" | "requirements" | "roadmap";

/** One validation finding; `code` is a stable machine-readable identifier. */
export interface ValidationError {
  code: string;
  message: string;
  /** Section name, requirement/slice ID, or file path the finding refers to. */
  location?: string;
}

/** Outcome of validating one artifact. `ok` is true iff `errors` is empty. */
export interface ValidationResult {
  ok: boolean;
  errors: ValidationError[];
  warnings: ValidationError[];
}

export interface ValidateOptions {
  /** Milestone ID (for example "M001") for the roadmap being validated. */
  milestoneId?: string;
  /** Paths to sibling artifacts used for cross-reference checks. */
  crossRefs?: {
    projectPath?: string;
    requirementsPath?: string;
    /**
     * Optional per-milestone roadmap paths. When supplied, requirement
     * primaryOwner / supportingSlices entries are checked for slice-half
     * (S##) existence in the named milestone's roadmap. Without this,
     * only the milestone half (M###) is validated.
     */
    roadmapPaths?: Record<string, string>;
  };
}
/** H2 sections PROJECT.md must contain. */
const REQUIRED_PROJECT_SECTIONS = [
  "What This Is",
  "Core Value",
  "Current State",
  "Architecture / Key Patterns",
  "Capability Contract",
  "Milestone Sequence",
];

/** H2 sections REQUIREMENTS.md must contain. */
const REQUIRED_REQUIREMENTS_SECTIONS = [
  "Active",
  "Validated",
  "Deferred",
  "Out of Scope",
  "Traceability",
  "Coverage Summary",
];

// Roadmap section requirements:
// - "Slices" (legacy H3 format) OR "Slice Overview" (table format
//   emitted by workflow-projections.ts) — at least one must be present.
// - "Definition of Done" — always required.
// Defensive parsing accepts both shapes; the validator does the same.
const REQUIRED_ROADMAP_SECTIONS = ["Definition of Done"];
const ROADMAP_SLICE_SECTIONS = ["Slices", "Slice Overview"];

/** Allowed values for a requirement's "- Class:" field. */
const ALLOWED_REQUIREMENT_CLASSES = new Set([
  "core-capability",
  "primary-user-loop",
  "launchability",
  "continuity",
  "failure-visibility",
  "integration",
  "quality-attribute",
  "operability",
  "admin/support",
  "compliance/security",
  "differentiator",
  "constraint",
  "anti-feature",
]);

/** Maps a requirement's "- Status:" value to the H2 section it must live under. */
const STATUS_TO_SECTION: Record<string, string> = {
  active: "Active",
  validated: "Validated",
  deferred: "Deferred",
  "out-of-scope": "Out of Scope",
};
/**
 * Read a file as UTF-8, returning null when it does not exist or cannot be
 * read. Callers treat null as "artifact missing" — no error is surfaced here.
 */
function loadFile(path: string): string | null {
  try {
    return existsSync(path) ? readFileSync(path, "utf-8") : null;
  } catch {
    return null;
  }
}
/**
 * Build a ValidationError. `location` is attached only when truthy, so
 * findings without a meaningful anchor serialize without the key.
 */
function err(code: string, message: string, location?: string): ValidationError {
  const finding = { code, message };
  return location ? { ...finding, location } : finding;
}
// ─── PROJECT.md ─────────────────────────────────────────────────────────
/**
 * Validate PROJECT.md structure: required H2 sections present and
 * non-empty, no unsubstituted template tokens, and a well-formed
 * "Milestone Sequence" (unique, monotonically numbered M### entries, each
 * with a title and one-liner). The Capability Contract section gets a
 * warning when it does not reference REQUIREMENTS.md.
 */
function validateProjectContent(content: string): ValidationResult {
  const errors: ValidationError[] = [];
  const warnings: ValidationError[] = [];
  const addError = (code: string, message: string, location?: string): void => {
    errors.push(err(code, message, location));
  };
  const addWarning = (code: string, message: string, location?: string): void => {
    warnings.push(err(code, message, location));
  };
  const parsed = parseProject(content);
  for (const name of REQUIRED_PROJECT_SECTIONS) {
    if (!(name in parsed.sections)) {
      addError("missing-section", `Missing required section "## ${name}"`, name);
    }
  }
  for (const name of parsed.sectionsWithTokens) {
    addError("template-token", `Section "${name}" contains unsubstituted {{...}} template tokens`, name);
  }
  for (const name of REQUIRED_PROJECT_SECTIONS) {
    const body = parsed.sections[name];
    if (body !== undefined && body.trim() === "") {
      addError("empty-section", `Section "## ${name}" is empty`, name);
    }
  }
  const milestoneSection = "Milestone Sequence";
  if (parsed.milestones.length === 0 && milestoneSection in parsed.sections) {
    addError("no-milestones", "Milestone Sequence has no entries", milestoneSection);
  }
  const seenIds = new Set<string>();
  let lastNumber = 0;
  for (const milestone of parsed.milestones) {
    if (seenIds.has(milestone.id)) {
      addError("duplicate-milestone", `Duplicate milestone ID ${milestone.id}`, milestoneSection);
    }
    seenIds.add(milestone.id);
    const number = parseInt(milestone.id.slice(1), 10);
    // Each ID must be exactly one past the previous one (M001, M002, …).
    if (number !== lastNumber + 1) {
      addWarning("non-monotonic-milestone", `Milestone ${milestone.id} is not monotonically numbered (expected M${String(lastNumber + 1).padStart(3, "0")})`, milestoneSection);
    }
    lastNumber = number;
    if (!milestone.title || !milestone.oneLiner) {
      addError("incomplete-milestone", `Milestone ${milestone.id} is missing title or one-liner`, milestoneSection);
    }
  }
  const capabilityBody = parsed.sections["Capability Contract"] ?? "";
  if (capabilityBody && !capabilityBody.includes("REQUIREMENTS.md")) {
    addWarning("missing-requirements-ref", "Capability Contract section should reference .sf/REQUIREMENTS.md", "Capability Contract");
  }
  return { ok: errors.length === 0, errors, warnings };
}
// ─── REQUIREMENTS.md ────────────────────────────────────────────────────
/**
 * Parse a comma/space-separated slice-reference list (e.g. "M001/S02,
 * M002/S03"). The sentinels "—", "-", and "none" (case-insensitive), as
 * well as empty/whitespace-only input, all mean "no references".
 */
function parseSliceList(raw: string): string[] {
  if (!raw) return [];
  const value = raw.trim();
  const isEmptySentinel =
    value === "" || value === "—" || value === "-" || value.toLowerCase() === "none";
  if (isEmptySentinel) return [];
  return value.split(/[,\s]+/).map(token => token.trim()).filter(Boolean);
}
/**
 * Validate REQUIREMENTS.md.
 *
 * Checks: required H2 sections, template tokens, per-requirement shape
 * (via validateRequirementShape), duplicate and non-monotonic IDs, owner /
 * supporting-slice cross-references against PROJECT.md milestones and any
 * loaded per-milestone roadmaps, and the Coverage Summary "Active
 * requirements" count against the actual ## Active entry count.
 *
 * @param content              Raw REQUIREMENTS.md markdown.
 * @param projectContent       PROJECT.md content for milestone cross-refs,
 *                             or null when unavailable.
 * @param roadmapsByMilestone  Parsed roadmaps keyed by milestone ID
 *                             ("M###"), used for slice-half (S##) checks.
 */
function validateRequirementsContent(
  content: string,
  projectContent: string | null,
  roadmapsByMilestone: Map<string, ReturnType<typeof parseRoadmap>>,
): ValidationResult {
  const errors: ValidationError[] = [];
  const warnings: ValidationError[] = [];
  const parsed = parseRequirements(content);
  for (const required of REQUIRED_REQUIREMENTS_SECTIONS) {
    if (!(required in parsed.sections)) {
      errors.push(err("missing-section", `Missing required section "## ${required}"`, required));
    }
  }
  // Template tokens are re-checked per section (rather than via the
  // parser's aggregate boolean) so each finding carries a precise location.
  for (const sectionName of Object.keys(parsed.sections)) {
    const body = parsed.sections[sectionName];
    if (/\{\{[^}]+\}\}/.test(body)) {
      errors.push(err("template-token", `Section "${sectionName}" contains unsubstituted {{...}} template tokens`, sectionName));
    }
  }
  const seenIds = new Set<string>();
  let prevNum = 0;
  for (const r of parsed.requirements) {
    if (seenIds.has(r.id)) {
      errors.push(err("duplicate-requirement", `Duplicate requirement ID ${r.id}`, r.id));
    }
    seenIds.add(r.id);
    // "R001".slice(1) → "001" → 1.
    const num = parseInt(r.id.slice(1), 10);
    // NOTE(review): requirements are collected section-by-section (Active,
    // Validated, Deferred, Out of Scope), so a lower ID appearing in a
    // later section triggers this warning even when numbering is globally
    // monotonic — confirm this cross-section ordering is intended.
    if (num <= prevNum) {
      warnings.push(err("non-monotonic-requirement", `Requirement ${r.id} is not monotonically numbered`, r.id));
    }
    prevNum = num;
    validateRequirementShape(r, errors, warnings);
  }
  // Known milestone IDs come from PROJECT.md when available, otherwise
  // from the set of loaded roadmaps; with neither, milestone existence
  // checks are skipped entirely (canValidateMilestones = false).
  const milestoneIds = projectContent
    ? new Set(parseProject(projectContent).milestones.map(m => m.id))
    : new Set(Array.from(roadmapsByMilestone.keys()));
  const canValidateMilestones = projectContent !== null || roadmapsByMilestone.size > 0;
  /**
   * Validate one "M###/S##" reference (or partial). Pushes an error if
   * the milestone is known to be missing; pushes an error if a roadmap is loaded
   * for the milestone and the slice half is missing.
   */
  const checkRef = (
    requirementId: string,
    ref: string,
    field: "primaryOwner" | "supportingSlices",
  ): void => {
    // Tolerate the documented "none yet" / "none" sentinels for primaryOwner.
    if (field === "primaryOwner" && /^(none yet|none)$/.test(ref)) return;
    // "M###" alone (no slash) is allowed for primaryOwner shape; still want
    // to check milestone existence when project/roadmap context is available.
    const milestoneOnly = ref.match(/^(M\d{3})$/);
    if (milestoneOnly) {
      if (canValidateMilestones && !milestoneIds.has(milestoneOnly[1])) {
        errors.push(err("dangling-owner", `Requirement ${requirementId} ${field} references non-existent milestone ${milestoneOnly[1]}`, requirementId));
      }
      return;
    }
    const m = ref.match(/^(M\d{3})\/(S\d{2}|none yet)$/);
    if (!m) {
      warnings.push(err("malformed-slice-ref", `Requirement ${requirementId} ${field} value "${ref}" does not match expected M###/S## format`, requirementId));
      return;
    }
    const [, milestoneId, sliceHalf] = m;
    if (canValidateMilestones && !milestoneIds.has(milestoneId)) {
      errors.push(err("dangling-owner", `Requirement ${requirementId} ${field} references non-existent milestone ${milestoneId}`, requirementId));
      return;
    }
    // Slice-half cross-ref: only enforced when we have a roadmap for the milestone.
    if (sliceHalf === "none yet") return;
    const roadmap = roadmapsByMilestone.get(milestoneId);
    if (!roadmap) return;
    const sliceExists = roadmap.slices.some(s => s.id === sliceHalf);
    if (!sliceExists) {
      errors.push(err(
        "dangling-slice-ref",
        `Requirement ${requirementId} ${field} references slice ${milestoneId}/${sliceHalf} which does not exist in that milestone's roadmap`,
        requirementId,
      ));
    }
  };
  for (const r of parsed.requirements) {
    // primaryOwner: single reference.
    if (r.primaryOwner) checkRef(r.id, r.primaryOwner, "primaryOwner");
    // supportingSlices: comma/space-separated list.
    for (const ref of parseSliceList(r.supportingSlices)) {
      checkRef(r.id, ref, "supportingSlices");
    }
  }
  // Coverage Summary cross-check: the reported "Active requirements" count
  // must match the number of entries actually parsed under ## Active.
  const sectionCounts: Record<string, number> = { Active: 0, Validated: 0, Deferred: 0, "Out of Scope": 0 };
  for (const r of parsed.requirements) sectionCounts[r.parentSection] = (sectionCounts[r.parentSection] ?? 0) + 1;
  const expectedActive = sectionCounts.Active;
  const reportedActive = parsed.coverageSummary["Active requirements"];
  if (reportedActive !== undefined && parseInt(reportedActive, 10) !== expectedActive) {
    warnings.push(err("coverage-mismatch", `Coverage Summary says Active=${reportedActive} but ${expectedActive} entries found in ## Active`, "Coverage Summary"));
  }
  return { ok: errors.length === 0, errors, warnings };
}
/**
 * Per-requirement shape checks: required fields present and non-blank,
 * class drawn from the allowed taxonomy, Status consistent with the H2
 * section the entry lives under, and primaryOwner in one of the documented
 * reference formats. Findings are appended to the caller's collections.
 */
function validateRequirementShape(r: ParsedRequirement, errors: ValidationError[], warnings: ValidationError[]): void {
  const requiredFields: Array<keyof ParsedRequirement> = [
    "class", "status", "description", "whyItMatters", "source", "primaryOwner", "validation",
  ];
  for (const field of requiredFields) {
    const value = r[field];
    if (!value || (value as string).trim() === "") {
      errors.push(err("missing-field", `Requirement ${r.id} is missing field "${field}"`, r.id));
    }
  }
  if (r.class && !ALLOWED_REQUIREMENT_CLASSES.has(r.class)) {
    errors.push(err("invalid-class", `Requirement ${r.id} has invalid class "${r.class}"`, r.id));
  }
  // Unknown statuses are handled by the missing-field check above; the
  // mismatch check only fires for statuses the mapping knows about.
  const expectedSection = STATUS_TO_SECTION[r.status];
  if (expectedSection && expectedSection !== r.parentSection) {
    errors.push(err("status-section-mismatch", `Requirement ${r.id} has Status "${r.status}" but lives under "## ${r.parentSection}" (expected "## ${expectedSection}")`, r.id));
  }
  const ownerShape = /^(M\d{3}(\/(S\d{2}|none yet))?|none yet|none)$/;
  if (r.primaryOwner && !ownerShape.test(r.primaryOwner)) {
    warnings.push(err("malformed-owner", `Requirement ${r.id} owner "${r.primaryOwner}" does not match expected formats (M### | M###/S## | M###/none yet | none yet | none)`, r.id));
  }
}
// ─── ROADMAP.md ─────────────────────────────────────────────────────────
/**
 * Validate one per-milestone ROADMAP.md.
 *
 * Checks: required sections, template tokens, slice list shape (unique,
 * monotonically numbered S## entries with risk + demo), the Depends graph
 * (malformed tokens, dangling refs, cycles), Definition of Done items,
 * and — when REQUIREMENTS.md content is supplied — that every slice is
 * the primary owner of at least one Active requirement.
 *
 * @param content              Raw roadmap markdown.
 * @param requirementsContent  REQUIREMENTS.md content for the orphan-slice
 *                             cross-check, or null to skip it.
 * @param currentMilestoneId   Milestone ("M###") this roadmap belongs to;
 *                             when non-null, only owners naming that
 *                             milestone count toward slice ownership.
 */
function validateRoadmapContent(content: string, requirementsContent: string | null, currentMilestoneId: string | null = null): ValidationResult {
  const errors: ValidationError[] = [];
  const warnings: ValidationError[] = [];
  const parsed = parseRoadmap(content);
  for (const required of REQUIRED_ROADMAP_SECTIONS) {
    if (!(required in parsed.sections)) {
      errors.push(err("missing-section", `Missing required section "## ${required}"`, required));
    }
  }
  // Slice section: accept either "## Slices" or "## Slice Overview".
  const hasSliceSection = ROADMAP_SLICE_SECTIONS.some(name => name in parsed.sections);
  if (!hasSliceSection) {
    errors.push(err("missing-section", `Missing slice section — expected "## Slices" or "## Slice Overview"`));
  }
  // Template tokens are checked per section so each finding has a location.
  for (const sectionName of Object.keys(parsed.sections)) {
    const body = parsed.sections[sectionName];
    if (/\{\{[^}]+\}\}/.test(body)) {
      errors.push(err("template-token", `Section "${sectionName}" contains unsubstituted {{...}} template tokens`, sectionName));
    }
  }
  // A slice section that parsed to zero slices is an error; attribute it
  // to whichever slice section is actually present.
  if (parsed.slices.length === 0 && hasSliceSection) {
    const sliceSection = ROADMAP_SLICE_SECTIONS.find(name => name in parsed.sections) ?? "Slices";
    errors.push(err("no-slices", `${sliceSection} section has no entries`, sliceSection));
  }
  // I5: surface malformed Depends tokens (e.g. "S99;" or "S01-S03") that the
  // parser dropped from the dependency graph. Warning, not error — the rest
  // of the graph is still usable.
  for (const m of parsed.malformedDepends) {
    warnings.push(err(
      "malformed-depends",
      `Slice ${m.sliceId} has malformed Depends value(s) that were dropped from the graph: ${m.values.join(", ")}`,
      m.sliceId,
    ));
  }
  if (parsed.definitionOfDone.length === 0 && "Definition of Done" in parsed.sections) {
    errors.push(err("no-definition-of-done", "Definition of Done has no items", "Definition of Done"));
  }
  // Per-slice shape checks: unique IDs, S01..Snn numbering, risk + demo set.
  const seenIds = new Set<string>();
  let prevNum = 0;
  for (const s of parsed.slices) {
    if (seenIds.has(s.id)) {
      errors.push(err("duplicate-slice", `Duplicate slice ID ${s.id}`, s.id));
    }
    seenIds.add(s.id);
    const num = parseInt(s.id.slice(1), 10);
    if (num !== prevNum + 1) {
      warnings.push(err("non-monotonic-slice", `Slice ${s.id} is not monotonically numbered (expected S${String(prevNum + 1).padStart(2, "0")})`, s.id));
    }
    prevNum = num;
    if (!s.risk || !s.demo) {
      errors.push(err("missing-slice-field", `Slice ${s.id} is missing required field (risk and demo are required)`, s.id));
    }
  }
  // Depends graph: dangling refs + cycle detection
  const sliceIds = new Set(parsed.slices.map(s => s.id));
  for (const s of parsed.slices) {
    for (const dep of s.depends) {
      if (!sliceIds.has(dep)) {
        errors.push(err("dangling-dependency", `Slice ${s.id} depends on non-existent slice ${dep}`, s.id));
      }
    }
  }
  if (hasCycle(parsed.slices)) {
    errors.push(err("circular-dependency", "Slice depends graph contains a cycle"));
  }
  // Orphan-slice cross-check: a slice should be the primary owner
  // ("M###/S##") of at least one Active requirement.
  if (requirementsContent) {
    const reqs = parseRequirements(requirementsContent);
    for (const s of parsed.slices) {
      const ownsAnyRequirement = reqs.requirements.some(r => {
        if (r.parentSection !== "Active") return false;
        const m = r.primaryOwner.match(/^(M\d{3})\/(S\d{2})$/);
        if (!m) return false;
        // Owners pointing at other milestones do not count for this roadmap.
        if (currentMilestoneId !== null && m[1] !== currentMilestoneId) return false;
        return m[2] === s.id;
      });
      if (!ownsAnyRequirement) {
        warnings.push(err("orphan-slice", `Slice ${s.id} owns no Active requirements`, s.id));
      }
    }
  }
  return { ok: errors.length === 0, errors, warnings };
}
/**
 * Detect a cycle in the slice dependency graph via DFS with the classic
 * "in progress" / "done" two-set coloring: revisiting a node that is still
 * on the DFS stack means a back-edge, i.e. a cycle. Dependencies on
 * unknown slice IDs are simply ignored here (they are reported separately
 * as dangling-dependency errors).
 */
function hasCycle(slices: Array<{ id: string; depends: string[] }>): boolean {
  const dependsOf = new Map(slices.map(slice => [slice.id, slice.depends]));
  const inProgress = new Set<string>();
  const done = new Set<string>();
  const walk = (id: string): boolean => {
    if (inProgress.has(id)) return true; // back-edge → cycle
    if (done.has(id)) return false; // already fully explored
    inProgress.add(id);
    const found = (dependsOf.get(id) ?? []).some(dep => walk(dep));
    inProgress.delete(id);
    done.add(id);
    return found;
  };
  return slices.some(slice => walk(slice.id));
}
// ─── Entry point ────────────────────────────────────────────────────────
/**
 * Validate one artifact file on disk. Returns a single file-missing error
 * when the path cannot be read; otherwise dispatches to the kind-specific
 * validator, loading any cross-reference artifacts named in
 * `opts.crossRefs` (best-effort — unreadable cross-refs are skipped).
 */
export function validateArtifact(
  filePath: string,
  kind: ArtifactKind,
  opts: ValidateOptions = {},
): ValidationResult {
  const content = loadFile(filePath);
  if (content === null) {
    return {
      ok: false,
      errors: [err("file-missing", `Artifact file not found: ${filePath}`, filePath)],
      warnings: [],
    };
  }
  switch (kind) {
    case "project":
      return validateProjectContent(content);
    case "requirements": {
      const crossRefs = opts.crossRefs ?? {};
      const projectContent = crossRefs.projectPath ? loadFile(crossRefs.projectPath) : null;
      const roadmapsByMilestone = new Map<string, ReturnType<typeof parseRoadmap>>();
      for (const [milestoneId, roadmapPath] of Object.entries(crossRefs.roadmapPaths ?? {})) {
        const roadmapContent = loadFile(roadmapPath);
        if (roadmapContent) roadmapsByMilestone.set(milestoneId, parseRoadmap(roadmapContent));
      }
      return validateRequirementsContent(content, projectContent, roadmapsByMilestone);
    }
    case "roadmap": {
      const requirementsContent = opts.crossRefs?.requirementsPath
        ? loadFile(opts.crossRefs.requirementsPath)
        : null;
      // Milestone ID: explicit option wins, then a ".../M001/…" or
      // ".../M001-…" path segment, then null (no milestone filter).
      const inferredMilestone = filePath.match(/(?:^|[\\/])(M\d{3})(?:[\\/]|-)/)?.[1] ?? null;
      return validateRoadmapContent(content, requirementsContent, opts.milestoneId ?? inferredMilestone);
    }
  }
}

View file

@ -0,0 +1,114 @@
/**
* Tests for the cross-tier escalation policy block injected into the SF
* system prompt (buildEscalationPolicyBlock in bootstrap/system-context.ts).
*
* Verifies:
* - canAskUser=true "Ask the user via" language, no "DO NOT" language
* - canAskUser=false "DO NOT call" + "exit with a structured blocker"
* - Both variants include all three tiers and key tool names
*/
import assert from "node:assert/strict";
import { describe, it } from "node:test";
import { buildEscalationPolicyBlock } from "../bootstrap/system-context.js";
describe("buildEscalationPolicyBlock", () => {
  // Assertions shared by both variants: the three tier headers plus the
  // key tool names the prompt block must mention. Registered via it() so
  // each stays an individually-reported subtest with the same titles as
  // before.
  const registerSharedAssertions = (block: string): void => {
    for (const tier of ["Tier 1", "Tier 2", "Tier 3"]) {
      it(`includes ${tier} header`, () => {
        assert.ok(block.includes(tier), `missing ${tier} in block`);
      });
    }
    it("mentions sift in Tier 1", () => {
      assert.ok(block.includes("sift"), `missing "sift" in block`);
    });
    it("mentions WebSearch in Tier 2", () => {
      assert.ok(block.includes("WebSearch"), `missing "WebSearch" in block`);
    });
    it("mentions Context7 in Tier 2", () => {
      assert.ok(block.includes("Context7"), `missing "Context7" in block`);
    });
  };

  describe("canAskUser=true (auto/step mode)", () => {
    const block = buildEscalationPolicyBlock(true);
    it('includes "Ask the user via" language', () => {
      assert.ok(
        block.includes("Ask the user via"),
        `expected "Ask the user via" in block:\n${block}`,
      );
    });
    it('does not include the "DO NOT" autonomous language', () => {
      assert.ok(
        !block.includes("DO NOT call"),
        `expected no "DO NOT call" in auto-mode block:\n${block}`,
      );
    });
    it("does not include the structured-blocker exit instruction", () => {
      assert.ok(
        !block.includes("exit with a structured blocker message"),
        `expected no blocker-exit language in auto-mode block:\n${block}`,
      );
    });
    registerSharedAssertions(block);
  });

  describe("canAskUser=false (autonomous mode)", () => {
    const block = buildEscalationPolicyBlock(false);
    it('includes "DO NOT call `ask_user_questions`"', () => {
      assert.ok(
        block.includes("DO NOT call"),
        `expected "DO NOT call" in autonomous block:\n${block}`,
      );
    });
    it('includes "exit with a structured blocker message"', () => {
      assert.ok(
        block.includes("exit with a structured blocker message"),
        `expected blocker-exit language in autonomous block:\n${block}`,
      );
    });
    it('does not include "Ask the user via" language', () => {
      assert.ok(
        !block.includes("Ask the user via"),
        `expected no "Ask the user via" in autonomous block:\n${block}`,
      );
    });
    registerSharedAssertions(block);
  });
});

View file

@ -287,8 +287,12 @@ export function isLikelyCommand(cmd: string): boolean {
if (PROSE_ARTICLES.has(firstToken.toLowerCase()) && tokens.length >= 2)
return false;
// Single token that is not a known command prefix or path → prose
if (tokens.length === 1) return false;
// Single token that is not a known command prefix or path: treat as prose
// only when it is a plain English-looking word (all alphabetic, no hyphens
// or underscores) with an uppercase first letter — e.g. "Document",
// "Build", "Verify". Single lowercase tokens (e.g. "custom-verify",
// "mycheck") look like custom scripts and are kept as command-like.
if (tokens.length === 1 && /^[A-Z][a-zA-Z]+$/.test(firstToken)) return false;
// First token starts with uppercase + 4 or more words → prose
if (/^[A-Z]/.test(firstToken) && tokens.length >= 4) return false;

View file

@ -2,6 +2,7 @@ import { createHash, randomUUID } from "node:crypto";
import { appendFileSync, existsSync, mkdirSync, readFileSync } from "node:fs";
import { join } from "node:path";
import { atomicWriteSync } from "./atomic-write.js";
import { sfRuntimeRoot } from "./paths.js";
import { withFileLockSync } from "./file-lock.js";
import { logWarning } from "./workflow-logger.js";
@ -56,7 +57,7 @@ export function appendEvent(
hash,
session_id: ENGINE_SESSION_ID,
};
const dir = join(basePath, ".sf");
const dir = sfRuntimeRoot(basePath);
mkdirSync(dir, { recursive: true });
appendFileSync(
join(dir, "event-log.jsonl"),

View file

@ -67,7 +67,8 @@ export type LogComponent =
| "cache" // Cache invalidation (state, paths, parse, artifacts)
| "memory-embeddings" // Memory embedding model discovery and bulk-embed
| "memory-ingest" // Memory source ingestion (note, file, url, artifact)
| "memory-backfill"; // Decisions-to-memories backfill
| "memory-backfill" // Decisions-to-memories backfill
| "preflight"; // Clean-root and pre-flight system checks
export interface LogEntry {
ts: string;

View file

@ -504,7 +504,7 @@ export function getWorkflowTransportSupportError(
const providerLabel = `"${provider}"`;
if (!launch) {
return `Provider ${providerLabel} cannot run ${surface}${unitLabel}: the SF workflow MCP server is not configured or discoverable. Detected Claude Code model but no workflow MCP. Please run /sf mcp init . from your project root. You can also configure SF_WORKFLOW_MCP_COMMAND, build packages/mcp-server/dist/cli.js, or install sf-mcp-server on PATH.`;
return `Provider ${providerLabel} cannot run ${surface}${unitLabel}: the SF workflow MCP server is not configured or discoverable. Detected Claude Code model but no workflow MCP. Please run /sf mcp init . from your project root to configure MCP. Note: local-transport MCP (local://) does not support structured questions (ask_user_questions elicitation) — structured-question flows require a remote MCP transport. You can also configure SF_WORKFLOW_MCP_COMMAND, build packages/mcp-server/dist/cli.js, or install sf-mcp-server on PATH.`;
}
const missing = [...new Set(requiredTools)].filter(

View file

@ -74,10 +74,26 @@ export function getWorktreeHealth(
/* default false */
}
// Dirty status: check from inside the worktree itself
// Dirty status: check from inside the worktree itself.
// Use lstatSync (not existsSync) to detect broken symlinks: existsSync follows
// the symlink and returns false when the target is missing, which is
// indistinguishable from an absent directory. lstatSync returns successfully
// for the symlink inode itself, letting us surface the broken-symlink case
// as { exists: false } rather than silently skipping the dirty check.
let dirty = false;
let dirtyFileCount = 0;
if (wt.exists && existsSync(wt.path)) {
let pathAccessible = false;
if (wt.exists) {
try {
const st = lstatSync(wt.path);
// If this is a symlink, the target must also exist to be accessible.
pathAccessible = !st.isSymbolicLink() || existsSync(wt.path);
} catch {
// lstatSync throws ENOENT when the path itself is absent — not accessible.
pathAccessible = false;
}
}
if (pathAccessible) {
try {
dirty = nativeHasChanges(wt.path);
if (dirty) {