sf snapshot: pre-dispatch, uncommitted changes after 33m inactivity

This commit is contained in:
Mikael Hugo 2026-05-05 01:11:49 +02:00
parent d3954ff529
commit b92d7bc96b
8 changed files with 1011 additions and 241 deletions

View file

@ -1196,47 +1196,9 @@ export async function postUnitPostVerification(pctx) {
// ── Doc-sync drift check (BUILD_PLAN Tier 2.2) ──
// After code-mutating units, check whether ARCHITECTURE.md or other tracked
// docs are out of sync with the actual codebase. Advisory — never blocks.
if (s.currentUnit?.type === "execute-task" || s.currentUnit?.type === "complete-slice") {
try {
const { getDocSyncProposal, formatDocSyncProposal } = await import("../doc-sync.js");
const { runGit: runGitCmd } = await import("./git-service.js");
const fs = await import("node:fs");
const path = await import("node:path");
const diffResult = runGitCmd(s.basePath, ["diff", "--name-only", "HEAD~1", "HEAD"]);
const changedFiles = diffResult?.stdout
? diffResult.stdout.trim().split("\n").filter(Boolean)
: [];
if (changedFiles.length > 0) {
const proposals = getDocSyncProposal(
changedFiles,
{
readFile: (p) => {
try { return fs.readFileSync(path.join(s.basePath, p), "utf-8"); }
catch { return null; }
},
},
{
glob: (_pat) => {
try {
const out = runGitCmd(s.basePath, ["ls-files"]);
return out?.stdout ? out.stdout.trim().split("\n").filter(Boolean) : [];
}
catch { return []; }
},
},
);
if (proposals) {
const msg = formatDocSyncProposal(proposals);
if (msg) {
ctx.ui.notify(msg, "info");
debugLog("postUnit", { phase: "doc-sync", driftCount: proposals.reduce((n, p) => n + p.drift.length, 0) });
}
}
}
}
catch (e) {
debugLog("postUnit", { phase: "doc-sync", error: e instanceof Error ? e.message : String(e) });
}
if (s.currentUnit) {
const { runDocSyncStagingCheck } = await import("./auto/auto-post-unit-staging.js");
await runDocSyncStagingCheck(s.basePath, s.currentUnit.type, ctx);
}
// ── Knowledge compounding (Mechanism 4) ──
// After milestone completion, distill high-confidence judgment-log entries

View file

@ -0,0 +1,82 @@
/**
* auto/auto-post-unit-staging.js — Post-unit staging checks for doc-sync drift.
*
* Purpose: after code-mutating units, check whether ARCHITECTURE.md (and future
* CONVENTIONS.md/STACK.md) are out of sync with the actual codebase. Advisory —
* never blocks dispatch.
*
* Consumer: auto-post-unit.js postUnitPostVerification.
*/
import { readFileSync } from "node:fs";
import { join } from "node:path";
import { getDocSyncProposal, formatDocSyncProposal } from "../doc-sync.js";
import { runGit } from "../git-service.js";
import { debugLog } from "../debug-logger.js";
/**
 * Unit types that mutate code — the doc-sync drift check only runs after one
 * of these completes.
 * @type {Set<string>}
 */
const CODE_MUTATING_UNITS = new Set(["execute-task", "complete-slice"]);
/**
* Run the doc-sync drift check after a code-mutating unit.
*
* @param {string} basePath - Project base path
* @param {string} unitType - Type of the completed unit
* @param {{ ui: { notify: (msg: string, level: string) => void } }} ctx - UI context
*/
export async function runDocSyncStagingCheck(basePath, unitType, ctx) {
  if (!CODE_MUTATING_UNITS.has(unitType)) {
    return;
  }
  try {
    // Files touched by the unit: diff of the last commit against its parent.
    const diff = runGit(basePath, ["diff", "--name-only", "HEAD~1", "HEAD"]);
    const changedFiles = diff?.stdout
      ? diff.stdout.trim().split("\n").filter(Boolean)
      : [];
    if (changedFiles.length === 0) {
      return;
    }
    // IO adapter: read docs relative to the project root, null on any failure.
    const io = {
      readFile: (relPath) => {
        try {
          return readFileSync(join(basePath, relPath), "utf-8");
        } catch {
          return null;
        }
      },
    };
    // FS adapter: approximate a glob with the git-tracked file list.
    const fsAdapter = {
      glob: (_pattern) => {
        try {
          const listing = runGit(basePath, ["ls-files"]);
          return listing?.stdout
            ? listing.stdout.trim().split("\n").filter(Boolean)
            : [];
        } catch {
          return [];
        }
      },
    };
    const proposals = getDocSyncProposal(changedFiles, io, fsAdapter);
    if (!proposals) {
      return;
    }
    const msg = formatDocSyncProposal(proposals);
    if (!msg) {
      return;
    }
    // Advisory only: surface the drift, never block dispatch.
    ctx.ui.notify(msg, "info");
    debugLog("postUnit", {
      phase: "doc-sync",
      driftCount: proposals.reduce((total, p) => total + p.drift.length, 0),
    });
  } catch (e) {
    // Best-effort check — log and continue on any failure.
    debugLog("postUnit", {
      phase: "doc-sync",
      error: e instanceof Error ? e.message : String(e),
    });
  }
}

View file

@ -0,0 +1,209 @@
/**
* Doc-sync drift detection for tracked architecture documentation.
*
* Purpose: detect when ARCHITECTURE.md (and future CONVENTIONS.md/STACK.md)
* are out of sync with the actual codebase structure. Returns a proposal
* describing what changed, so the user can decide whether to update docs.
*
* Consumer: auto-post-unit-staging.ts (called after code-mutating phases).
*/
/**
 * Tracked documentation files to check for drift, in check order.
 * Docs that do not exist on disk are skipped by getDocSyncProposal().
 */
const TRACKED_DOCS = ["ARCHITECTURE.md", "CONVENTIONS.md", "STACK.md"];
/**
* Parse the codemap table from ARCHITECTURE.md content.
* Returns an array of { path, purpose } entries.
*
* Purpose: extract structured path data from Markdown tables for comparison.
* Consumer: detectDocDrift().
*/
/**
 * Parse the codemap table from ARCHITECTURE.md content into
 * { path, purpose } entries. Only rows whose first cell contains a
 * backtick-quoted path are kept; parsing stops at the first non-table
 * line after the table starts.
 *
 * @param {string} content - Markdown document text
 * @returns {Array<{path: string, purpose: string}>}
 */
export function parseCodemapTable(content) {
  const rows = [];
  let seenTableRow = false;
  for (const rawLine of content.split("\n")) {
    const isPipeRow = rawLine.trim().startsWith("|");
    if (isPipeRow && rawLine.includes("`")) {
      seenTableRow = true;
      // Header separator rows carry no data.
      if (/^\|[\s\-|]+\|$/.test(rawLine)) continue;
      const cells = rawLine.split("|").map((cell) => cell.trim()).filter(Boolean);
      if (cells.length < 2 || !cells[0].includes("`")) continue;
      const pathMatch = cells[0].match(/`([^`]+)`/);
      if (pathMatch === null) continue;
      rows.push({
        path: pathMatch[1],
        // Re-join remaining cells so pipes inside the purpose survive.
        purpose: cells.slice(1).join(" | ").trim(),
      });
    } else if (seenTableRow && !isPipeRow) {
      // First non-table line after the table: stop scanning.
      break;
    }
  }
  return rows;
}
/**
* Check whether a filesystem path pattern matches an actual file.
* Handles globs like `src/resources/extensions/sf/` (directory)
* and `src/loader.ts` (file).
*
* Purpose: validate codemap entries against actual filesystem.
* Consumer: detectDocDrift().
*/
/**
 * Check whether a codemap path still corresponds to something on disk.
 *
 * @param {string} docPath - Path as written in the codemap (file, or directory
 *   with or without a trailing slash)
 * @param {Set<string>} existingPaths - Paths that exist on disk
 * @returns {boolean}
 */
function pathExistsOnDisk(docPath, existingPaths) {
  // Exact file match.
  if (existingPaths.has(docPath)) return true;
  // Directory entry (codemap uses trailing /): any file under it, or the
  // bare directory path itself, counts as existing.
  if (docPath.endsWith("/")) {
    for (const p of existingPaths) {
      if (p.startsWith(docPath) || p === docPath.slice(0, -1)) return true;
    }
  }
  // Parent-prefix match: the entry is a directory written without a trailing
  // slash. Normalize to "dir/" so `src/util` does not match `src/utils.js`.
  // (The previous `docPath.replace(/\/$/, "/")` was a no-op and allowed such
  // false positives.)
  const dirPrefix = docPath.endsWith("/") ? docPath : docPath + "/";
  for (const p of existingPaths) {
    if (p.startsWith(dirPrefix)) return true;
  }
  return false;
}
/**
* Detect drift between tracked documentation and actual codebase state.
*
* Purpose: the core detection function compares doc content against
* filesystem reality and changed files to determine if docs are stale.
* Consumer: getDocSyncProposal().
*
* @param {string} docContent - Content of the documentation file
* @param {string[]} changedFiles - Files changed in the current unit
* @param {Set<string>} existingPaths - Set of paths that exist on disk
* @returns {Array<{type: string, detail: string}>|null} Drift items or null
*/
export function detectDocDrift(docContent, changedFiles, existingPaths) {
  if (!docContent || changedFiles.length === 0) return null;
  const codemapEntries = parseCodemapTable(docContent);
  if (codemapEntries.length === 0) return null;
  const findings = [];

  // Check 1: codemap paths that no longer exist on disk.
  for (const { path: docPath } of codemapEntries) {
    // Non-local paths (e.g. ~/.sf/...) cannot be validated against the repo.
    if (docPath.startsWith("~")) continue;
    if (pathExistsOnDisk(docPath, existingPaths)) continue;
    findings.push({
      type: "stale_path",
      detail: `Codemap path \`${docPath}\` no longer exists on disk`,
    });
  }

  // Check 2: changed files living in directories the codemap never mentions.
  const documentedDirs = new Set(
    codemapEntries
      .map((entry) => entry.path.replace(/\/[^/]*$/, "/"))
      .filter((dir) => !dir.startsWith("~")),
  );
  const isUndocumented = (file) => {
    if (file.startsWith(".sf/")) return false; // workspace, not codebase
    if (file.startsWith("dist/")) return false; // generated output
    // Test-only files are ignored unless the harness dir itself is documented.
    if (file.includes(".test.") && !documentedDirs.has("harness/")) return false;
    for (const dir of documentedDirs) {
      if (file.startsWith(dir)) return false;
    }
    return true;
  };
  const undocumented = changedFiles.filter(isUndocumented);
  if (undocumented.length > 0) {
    // Collapse files to their unique parent-directory prefixes.
    const dirPrefixes = new Set(
      undocumented.map((file) => {
        const segments = file.split("/");
        return segments.length > 1 ? segments.slice(0, -1).join("/") + "/" : file;
      }),
    );
    for (const prefix of dirPrefixes) {
      findings.push({
        type: "undocumented_path",
        detail: `Changed files under \`${prefix}\` but this path is not in the codemap`,
      });
    }
  }
  return findings.length > 0 ? findings : null;
}
/**
* Get a doc-sync proposal for the current unit's changes.
* Reads tracked docs, runs drift detection, returns proposal or null.
*
* Purpose: orchestrate the drift check — read docs, gather filesystem state,
* run detection. Returns a human-readable proposal.
* Consumer: auto-post-unit-staging.ts.
*
* @param {string[]} changedFiles - Files changed in the current unit
* @param {{ readFile: (path: string) => string|null }} io - IO abstraction for testing
* @param {{ glob: (pattern: string) => string[] }} fs - Filesystem abstraction
* @returns {{ doc: string, drift: Array }|null} Proposal or null
*/
/**
 * Get a doc-sync proposal for the current unit's changes: read each tracked
 * doc, run drift detection against the changed files, and collect per-doc
 * drift findings.
 *
 * @param {string[]} changedFiles - Files changed in the current unit
 * @param {{ readFile: (path: string) => string|null }} io - IO abstraction for testing
 * @param {{ glob: (pattern: string) => string[] }} fs - Filesystem abstraction
 * @returns {Array<{ doc: string, drift: Array }>|null} Proposals or null
 */
export function getDocSyncProposal(changedFiles, io, fs) {
  if (!changedFiles || changedFiles.length === 0) return null;
  // Only check code-mutating changes (skip .sf/, dist/, docs-only).
  const codeChanges = changedFiles.filter(
    (f) =>
      !f.startsWith(".sf/") &&
      !f.startsWith("dist/") &&
      !f.startsWith("docs/") &&
      !f.startsWith("BUILD_PLAN"),
  );
  if (codeChanges.length === 0) return null;
  // The glob result is loop-invariant; compute it lazily once instead of
  // re-listing the filesystem for every tracked doc.
  let existingPaths = null;
  const proposals = [];
  for (const docFile of TRACKED_DOCS) {
    const content = io.readFile(docFile);
    if (!content) continue; // Doc doesn't exist — skip
    if (existingPaths === null) {
      existingPaths = new Set(fs.glob("**/*"));
    }
    const drift = detectDocDrift(content, codeChanges, existingPaths);
    if (drift && drift.length > 0) {
      proposals.push({ doc: docFile, drift });
    }
  }
  return proposals.length > 0 ? proposals : null;
}
/**
* Format a doc-sync proposal as a human-readable notification.
*
* Purpose: convert the machine-readable proposal into a user-facing message.
* Consumer: auto-post-unit-staging.ts notification logging.
*/
/**
 * Format a doc-sync proposal as a human-readable notification string.
 *
 * @param {Array<{ doc: string, drift: Array<{detail: string}> }>|null} proposals
 * @returns {string|null} Multi-line message, or null when there is nothing to say
 */
export function formatDocSyncProposal(proposals) {
  if (!proposals || proposals.length === 0) return null;
  const out = ["📄 Doc-sync: documentation may be out of date"];
  for (const proposal of proposals) {
    out.push(` ${proposal.doc}:`);
    for (const { detail } of proposal.drift) {
      out.push(` - ${detail}`);
    }
  }
  out.push(" Run `sf plan promote` to update if needed.");
  return out.join("\n");
}

View file

@ -0,0 +1,222 @@
/**
* Schedule Store — append-only JSONL persistence for scheduled entries.
*
* Purpose: provide durable, queryable storage for schedule entries with
* status-grouping semantics (latest entry per ID wins) and time-based queries.
*
* Consumer: schedule CLI commands (S02), auto-dispatch reminders, and UI overlays.
*/
import {
  appendFileSync,
  closeSync,
  existsSync,
  mkdirSync,
  openSync,
  readFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { withFileLockSync } from "../file-lock.js";
import { sfRuntimeRoot } from "../paths.js";
// ─── Constants ──────────────────────────────────────────────────────────────
/** Backing JSONL filename, shared by both project and global scopes. */
const FILENAME = "schedule.jsonl";
// Global-scope root. Read once at module load, so later changes to
// process.env.SF_HOME have no effect.
// NOTE(review): `||` means an empty-string SF_HOME falls back to ~/.sf —
// presumably intentional; confirm if empty must be honored.
/** @type {string} */
const _sfHome = process.env.SF_HOME || join(homedir(), ".sf");
// ─── Public API ─────────────────────────────────────────────────────────────
/**
* Create a schedule store bound to a project base path.
*
* Purpose: factory that closes over basePath so callers don't repeat it
* on every operation.
*
* Consumer: CLI commands and auto-mode schedulers.
*
* @param {string} basePath
* @returns {{
* appendEntry: (scope: import("./schedule-types.js").ScheduleScope, entry: import("./schedule-types.js").ScheduleEntry) => void,
* readEntries: (scope: import("./schedule-types.js").ScheduleScope) => import("./schedule-types.js").ScheduleEntry[],
* findDue: (scope: import("./schedule-types.js").ScheduleScope, now: string|number|Date) => import("./schedule-types.js").ScheduleEntry[],
* findUpcoming: (scope: import("./schedule-types.js").ScheduleScope, now: string|number|Date, windowDays: number) => import("./schedule-types.js").ScheduleEntry[],
* _filePathForScope: (scope: import("./schedule-types.js").ScheduleScope) => string,
* }}
*/
export function createScheduleStore(basePath) {
  // Each method closes over basePath so callers never repeat it.
  return {
    appendEntry(scope, entry) {
      _appendEntry(basePath, scope, entry);
    },
    readEntries(scope) {
      return _readEntries(basePath, scope);
    },
    findDue(scope, now) {
      return _findDue(basePath, scope, now);
    },
    findUpcoming(scope, now, windowDays) {
      return _findUpcoming(basePath, scope, now, windowDays);
    },
    _filePathForScope(scope) {
      return _resolvePath(basePath, scope);
    },
  };
}
/**
* Resolve the absolute file path for a given schedule scope.
*
* Purpose: allow diagnostics and tests to inspect the backing file directly.
*
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @returns {string}
*/
export function resolveSchedulePath(basePath, scope) {
  // Thin public wrapper over the internal resolver.
  return _resolvePath(basePath, scope);
}
// ─── Internal ───────────────────────────────────────────────────────────────
/**
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @returns {string}
*/
function _resolvePath(basePath, scope) {
  // Global entries live under the SF home dir; project entries under the
  // project's runtime root.
  const root = scope === "global" ? _sfHome : sfRuntimeRoot(basePath);
  return join(root, FILENAME);
}
/**
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @param {import("./schedule-types.js").ScheduleEntry} entry
*/
/**
 * Append a single entry as one JSONL line, under a file lock.
 *
 * @param {string} basePath
 * @param {import("./schedule-types.js").ScheduleScope} scope
 * @param {import("./schedule-types.js").ScheduleEntry} entry
 */
function _appendEntry(basePath, scope, entry) {
  const filePath = _resolvePath(basePath, scope);
  // Use path.dirname rather than slicing at the last "/": join() produces
  // backslash separators on Windows, where the slice would yield a bogus dir.
  mkdirSync(dirname(filePath), { recursive: true });
  // Ensure file exists so proper-lockfile can acquire a lock against it.
  if (!existsSync(filePath)) {
    closeSync(openSync(filePath, "a"));
  }
  withFileLockSync(filePath, () => {
    appendFileSync(filePath, JSON.stringify(entry) + "\n", "utf-8");
  });
}
/**
* Read all entries from the store, skipping corrupt lines and grouping by id.
* The latest entry (by created_at) per ID wins.
*
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @returns {import("./schedule-types.js").ScheduleEntry[]}
*/
function _readEntries(basePath, scope) {
  const filePath = _resolvePath(basePath, scope);
  if (!existsSync(filePath)) {
    return [];
  }
  let content;
  try {
    content = readFileSync(filePath, "utf-8");
  } catch {
    return [];
  }
  /** @type {Map<string, import("./schedule-types.js").ScheduleEntry>} */
  const latestById = new Map();
  let badLines = 0;
  for (const rawLine of content.split("\n")) {
    if (!rawLine.trim()) continue;
    /** @type {import("./schedule-types.js").ScheduleEntry} */
    let parsed;
    try {
      parsed = JSON.parse(rawLine);
    } catch {
      badLines++;
      continue;
    }
    // Entries without a string id are silently dropped (not counted corrupt).
    if (!parsed || typeof parsed.id !== "string") continue;
    const prev = latestById.get(parsed.id);
    // Latest created_at per id wins (ISO strings compare lexicographically).
    if (!prev || parsed.created_at > prev.created_at) {
      latestById.set(parsed.id, parsed);
    }
  }
  if (badLines > 0) {
    _warn(
      `schedule-store: skipped ${badLines} corrupt line(s) in ${filePath}`,
    );
  }
  return Array.from(latestById.values());
}
/**
* Return pending entries whose due_at is at or before `now`, sorted by due_at ASC.
*
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @param {string|number|Date} now
* @returns {import("./schedule-types.js").ScheduleEntry[]}
*/
function _findDue(basePath, scope, now) {
  const cutoffMs = _toTimestamp(now);
  const due = _readEntries(basePath, scope).filter(
    (entry) => entry.status === "pending" && _toTimestamp(entry.due_at) <= cutoffMs,
  );
  due.sort((a, b) => _toTimestamp(a.due_at) - _toTimestamp(b.due_at));
  return due;
}
/**
* Return pending entries whose due_at is after `now` and within `windowDays`,
* sorted by due_at ASC.
*
* @param {string} basePath
* @param {import("./schedule-types.js").ScheduleScope} scope
* @param {string|number|Date} now
* @param {number} windowDays
* @returns {import("./schedule-types.js").ScheduleEntry[]}
*/
function _findUpcoming(basePath, scope, now, windowDays) {
  const startMs = _toTimestamp(now);
  const endMs = startMs + windowDays * 24 * 60 * 60 * 1000;
  const upcoming = _readEntries(basePath, scope).filter((entry) => {
    if (entry.status !== "pending") return false;
    const dueMs = _toTimestamp(entry.due_at);
    // Strictly in the future, but inside the window.
    return dueMs > startMs && dueMs <= endMs;
  });
  upcoming.sort((a, b) => _toTimestamp(a.due_at) - _toTimestamp(b.due_at));
  return upcoming;
}
// ─── Helpers ────────────────────────────────────────────────────────────────
/**
* Convert an ISO string, Date, or epoch ms to a numeric timestamp.
*
* @param {string|number|Date} value
* @returns {number}
*/
function _toTimestamp(value) {
  if (value instanceof Date) {
    return value.getTime();
  }
  if (typeof value === "number") {
    return value;
  }
  // ISO-8601 (or any Date.parse-able) string; NaN on unparseable input.
  return Date.parse(value);
}
/**
* Write a warning to stderr without throwing.
*
* @param {string} message
*/
function _warn(message) {
  const line = `[sf:schedule-store] ${message}\n`;
  try {
    process.stderr.write(line);
  } catch {
    // Best-effort only: a failed stderr write must never break the caller.
  }
}

View file

@ -0,0 +1,99 @@
/**
* Schedule Types — core JSDoc type definitions for the SF schedule module.
*
* Purpose: provide a single source of truth for schedule entry shapes so that
* the store, queries, and CLI commands all agree on the data contract.
*
* Consumer: schedule-store.js, schedule CLI commands (S02), and UI overlays.
*/
// ─── Enums ──────────────────────────────────────────────────────────────────
/**
* @typedef {("project"|"global")} ScheduleScope
* project — entries stored in `<basePath>/.sf/schedule.jsonl`
* global — entries stored in `~/.sf/schedule.jsonl`
*/
/**
* @typedef {("pending"|"done"|"cancelled"|"snoozed")} ScheduleStatus
*/
/**
* @typedef {("reminder"|"milestone_check"|"review_due"|"recurring")} ScheduleKind
*/
/**
* @typedef {("auto"|"user"|"system")} ScheduleCreatedBy
*/
// ─── Payloads (discriminated union by kind) ─────────────────────────────────
/**
* @typedef {object} ReminderPayload
* @property {string} [message] Human-readable reminder text
* @property {string} [unitId] Associated unit (e.g. "M001/S01/T01")
* @property {string} [milestoneId] Associated milestone
*/
/**
* @typedef {object} MilestoneCheckPayload
* @property {string} milestoneId Milestone to check
* @property {string} [checkType] "validation" | "completion" | "scope-creep"
*/
/**
* @typedef {object} ReviewDuePayload
* @property {string} [prUrl] Link to PR requiring review
* @property {string} [reviewer] Expected reviewer handle
* @property {string} [unitId] Unit that produced the artifact
*/
/**
* @typedef {object} RecurringPayload
* @property {string} cron Cron expression (e.g. "0 9 * * 1")
* @property {string} [unitId] Unit to re-schedule on each tick
* @property {string} [milestoneId] Milestone to associate with
*/
/**
* @typedef {ReminderPayload|MilestoneCheckPayload|ReviewDuePayload|RecurringPayload} SchedulePayload
*/
// ─── Entry ──────────────────────────────────────────────────────────────────
/**
* @typedef {object} ScheduleEntry
* @property {string} id ULID monotonic, sortable, 28 chars
* @property {ScheduleKind} kind What kind of scheduled item this is
* @property {ScheduleStatus} status Current lifecycle status
* @property {string} due_at ISO-8601 timestamp
* @property {string} created_at ISO-8601 timestamp
* @property {SchedulePayload} payload Kind-specific data
* @property {ScheduleCreatedBy} created_by Who created the entry
*/
// ─── Guards ─────────────────────────────────────────────────────────────────
/** @type {Set<string>} */
const VALID_KINDS = new Set([
  "reminder",
  "milestone_check",
  "review_due",
  "recurring",
]);
/**
 * Type guard: is `value` one of the known schedule kinds?
 *
 * Purpose: keep corrupt or future-kind entries out of the store when reading
 * untrusted JSONL lines.
 *
 * Consumer: schedule-store.js readEntries() filter, CLI input validation.
 *
 * @param {unknown} value
 * @returns {value is ScheduleKind}
 */
export function isValidKind(value) {
  if (typeof value !== "string") {
    return false;
  }
  return VALID_KINDS.has(value);
}

View file

@ -0,0 +1,107 @@
/**
* Schedule ULID — monotonic, sortable identifier generator.
*
* Format: `01` prefix + 10-char Crockford-Base32 timestamp + 16-char random suffix.
* Total length: 28 characters.
*
* Purpose: generate time-sortable, collision-resistant IDs for schedule entries
* without an external dependency. Monotonicity within the same millisecond is
* guaranteed by tracking the last random suffix and incrementing it when needed.
*
* Consumer: schedule-store.js appendEntry() to assign entry IDs.
*/
import { randomUUID } from "node:crypto";
// ─── Constants ──────────────────────────────────────────────────────────────
/** Crockford Base32 alphabet (excludes I, L, O, U to avoid ambiguity). */
const CROCKFORD = "0123456789ABCDEFGHJKMNPQRSTVWXYZ";
/** Number of characters for the timestamp portion (50 bits). */
const TS_CHARS = 10;
/** Number of characters for the random portion (80 bits). */
const RANDOM_CHARS = 16;
/** ULID prefix. */
const PREFIX = "01";
/** Mask for 80 bits. */
const RANDOM_MASK = (1n << 80n) - 1n;
// ─── Module State ───────────────────────────────────────────────────────────
// NOTE: this is per-process module state, so generateULID's monotonicity
// guarantee holds only within a single process.
/** Last timestamp used (milliseconds since epoch). */
let _lastTimestamp = 0;
/** Last random suffix value (as a BigInt for precise arithmetic). */
let _lastRandomValue = 0n;
// ─── Public API ─────────────────────────────────────────────────────────────
/**
* Generate a new ULID string.
*
* Uses Date.now() for the timestamp and crypto.randomUUID() for entropy.
* If two calls occur within the same millisecond, the random suffix is
* strictly incremented to preserve monotonic ordering.
*
* Purpose: produce deterministic-sortable IDs that work across processes
* (no shared counter file) while keeping collision probability negligible.
*
* Consumer: schedule-store.js when appending new entries.
*
* @returns {string} 28-character ULID
*/
export function generateULID() {
  const nowMs = Date.now();
  if (nowMs > _lastTimestamp) {
    // Fresh millisecond: take the new timestamp and fresh entropy.
    _lastTimestamp = nowMs;
    _lastRandomValue = _extractRandomFromUUID();
  } else {
    // Same millisecond, or clock moved backwards: increment the random
    // suffix so IDs stay strictly ordered — standard ULID behaviour.
    _lastRandomValue = (_lastRandomValue + 1n) & RANDOM_MASK;
    if (_lastRandomValue === 0n) {
      // The 80-bit suffix wrapped — advance to the next millisecond.
      _lastTimestamp += 1;
      _lastRandomValue = _extractRandomFromUUID();
    }
  }
  const timestampPart = _encodeBase32(BigInt(_lastTimestamp), TS_CHARS);
  const randomPart = _encodeBase32(_lastRandomValue, RANDOM_CHARS);
  return `${PREFIX}${timestampPart}${randomPart}`;
}
// ─── Internal ───────────────────────────────────────────────────────────────
/**
* Encode a non-negative integer as a fixed-length Crockford Base32 string.
*
* @param {bigint} value
* @param {number} length
* @returns {string}
*/
function _encodeBase32(value, length) {
  // Fill from the least-significant digit backwards, 5 bits per character.
  const chars = new Array(length);
  let remaining = value;
  for (let pos = length - 1; pos >= 0; pos--) {
    chars[pos] = CROCKFORD[Number(remaining & 0x1fn)];
    remaining >>= 5n;
  }
  return chars.join("");
}
/**
* Extract 80 bits of randomness from a UUID v4.
*
* @returns {bigint}
*/
function _extractRandomFromUUID() {
  // Strip dashes, interpret the 32 hex digits as one 128-bit value, and
  // keep only the lower 80 bits.
  const hexDigits = randomUUID().replace(/-/g, "");
  return BigInt(`0x${hexDigits}`) & RANDOM_MASK;
}

View file

@ -1,200 +0,0 @@
import { describe, expect, it } from "vitest";
/**
* Tests for plan-quality.js ceremony depth validation.
*
* Purpose: verify that ceremony validators enforce not just presence but
* purposeful depth — rubber-stamp contributions like "This is fine" must
* trigger shallow warnings even though they pass presence checks.
*
* Consumer: slice planning pipeline (plan-slice tool, plan quality inspection).
*/
function importPlanQuality() {
  // Dynamic import keeps module-load failures inside the individual tests;
  // returning the promise directly is equivalent to the async/return form.
  return import("../plan-quality.js");
}
describe("plan-quality ceremony depth", () => {
  // Presence-only validator: accepts any non-empty fields, even shallow text.
  describe("hasCompleteAdversarialReview", () => {
    it("rejects_missing_fields", async () => {
      const { hasCompleteAdversarialReview } = await importPlanQuality();
      expect(hasCompleteAdversarialReview(null)).toBe(false);
      expect(hasCompleteAdversarialReview({})).toBe(false);
      expect(hasCompleteAdversarialReview({ partner: "", combatant: "ok", architect: "ok" })).toBe(false);
    });
    it("accepts_present_fields_even_if_shallow", async () => {
      const { hasCompleteAdversarialReview } = await importPlanQuality();
      // Depth is enforced separately by inspectSlicePlanMarkdown.
      expect(hasCompleteAdversarialReview({
        partner: "Looks good",
        combatant: "Seems fine",
        architect: "Agree",
      })).toBe(true);
    });
    it("accepts_purposeful_fields", async () => {
      const { hasCompleteAdversarialReview } = await importPlanQuality();
      expect(hasCompleteAdversarialReview({
        partner: "The plan correctly identifies the coupling between auto-dispatch.ts and phases.ts. File paths are specific and the approach is grounded in existing patterns.",
        combatant: "Risk 1: state machine refactor will take longer due to coupling in auto-dispatch.ts. Risk 2: schema migration breaks existing parsers. Risk 3: synthesis gate adds latency.",
        architect: "Three subsystems affected: dispatch controller (auto-dispatch.ts), planning artifacts (PLAN.md), and verification gate. Coupling point: dispatch ↔ verification.",
      })).toBe(true);
    });
  });
  // Depth rubric: each review section must be substantive (see boundary tests
  // below — the cutoff is 80 characters).
  describe("inspectSlicePlanMarkdown depth warnings", () => {
    const makePlanMarkdown = (partner, combatant, architect) => `# Slice Plan
## Adversarial Review
### Partner Review
${partner}
### Combatant Review
${combatant}
### Architect Review
${architect}
`;
    it("flags_shallow_partner_review", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = makePlanMarkdown(
        "Looks good to me",
        "Risk: might be hard. Risk: could break. Risk: unknown. Mitigation: be careful and test thoroughly before merging.",
        "System-fit: the dispatch controller (auto-dispatch.ts) and planning artifacts (PLAN.md) are affected. Coupling point: dispatch ↔ verification gate.",
      );
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain(
        "shallow partner review — cite specific evidence (file paths, test gaps, prior learnings)",
      );
    });
    it("flags_shallow_combatant_review", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = makePlanMarkdown(
        "Strong plan grounded in the existing auto-dispatch.ts patterns. File paths are specific and verification strategy is sound.",
        "This is fine",
        "System-fit: the dispatch controller (auto-dispatch.ts) and planning artifacts (PLAN.md) are affected. Coupling point: dispatch ↔ verification gate.",
      );
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain(
        "shallow combatant review — name specific risks with concrete failure scenarios",
      );
    });
    it("flags_shallow_architect_review", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = makePlanMarkdown(
        "Strong plan grounded in the existing auto-dispatch.ts patterns. File paths are specific and verification strategy is sound.",
        "Risk 1: state machine refactor takes longer. Risk 2: schema migration breaks parsers. Risk 3: latency overhead from synthesis gate. Mitigation: timebox and simplify.",
        "Agree with plan",
      );
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain(
        "shallow architect review — name affected subsystems and coupling points",
      );
    });
    it("passes_with_purposeful_reviews", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = makePlanMarkdown(
        "Strong plan grounded in the existing auto-dispatch.ts patterns. File paths are specific and verification strategy is sound.",
        "Risk 1: state machine refactor takes longer due to coupling. Risk 2: schema migration breaks parsers. Risk 3: latency overhead. Mitigation: timebox.",
        "Three subsystems affected: dispatch controller, planning artifacts, and verification gate. Coupling point: dispatch ↔ verification.",
      );
      const result = inspectSlicePlanMarkdown(content);
      const shallowIssues = result.issues.filter((i) => i.startsWith("shallow"));
      expect(shallowIssues).toHaveLength(0);
    });
    it("still_flags_missing_sections", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = "# Slice Plan\n\nNo adversarial review section.\n";
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain("missing adversarial review");
    });
    // Boundary pair: 79 chars is shallow, 80 chars is not.
    it("flags_at_boundary_79_chars", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const shortPartner = "a".repeat(79);
      const content = makePlanMarkdown(
        shortPartner,
        "Risk 1: state machine refactor takes longer due to coupling. Risk 2: schema migration breaks parsers. Risk 3: latency overhead from synthesis gate. Mitigation: timebox.",
        "Three subsystems affected: dispatch controller, planning artifacts, and verification gate. Coupling point: dispatch ↔ verification.",
      );
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain(
        "shallow partner review — cite specific evidence (file paths, test gaps, prior learnings)",
      );
    });
    it("passes_at_boundary_80_chars", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const longEnoughPartner = "a".repeat(80);
      const content = makePlanMarkdown(
        longEnoughPartner,
        "Risk 1: state machine refactor takes longer due to coupling. Risk 2: schema migration breaks parsers. Risk 3: latency overhead from synthesis gate. Mitigation: timebox.",
        "Three subsystems affected: dispatch controller, planning artifacts, and verification gate. Coupling point: dispatch ↔ verification.",
      );
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).not.toContain(
        "shallow partner review — cite specific evidence (file paths, test gaps, prior learnings)",
      );
    });
    it("accumulates_multiple_shallow_warnings", async () => {
      const { inspectSlicePlanMarkdown } = await importPlanQuality();
      const content = makePlanMarkdown("Short", "Brief", "Yep");
      const result = inspectSlicePlanMarkdown(content);
      expect(result.issues).toContain(
        "shallow partner review — cite specific evidence (file paths, test gaps, prior learnings)",
      );
      expect(result.issues).toContain(
        "shallow combatant review — name specific risks with concrete failure scenarios",
      );
      expect(result.issues).toContain(
        "shallow architect review — name affected subsystems and coupling points",
      );
      const shallowIssues = result.issues.filter((i) => i.startsWith("shallow"));
      expect(shallowIssues).toHaveLength(3);
    });
  });
  // Structured planning meeting: all roles present plus a valid recommendedRoute.
  describe("hasStructuredPlanningMeeting", () => {
    const makeValidMeeting = () => ({
      trigger: "Planning needed for slice decomposition",
      pm: "User value is clear — this improves auto-mode reliability by preventing context oscillation",
      researcher: "Claude Code coordinatorMode.ts shows the synthesis gate pattern works at Anthropic scale",
      partner: "Strong plan grounded in existing auto-dispatch.ts patterns with specific file paths and evidence",
      combatant: "Risk 1: state machine refactor takes longer. Risk 2: schema migration breaks parsers. Risk 3: latency.",
      architect: "Dispatch controller, planning artifacts, and verification gate affected. Coupling: dispatch ↔ verification.",
      moderator: "Approve with timebox on S04. Combatant's latency concern addressed by adding synthesis latency budget.",
      confidenceSummary: "High confidence on foundation tier, medium on architecture tier due to state machine coupling",
      recommendedRoute: "planning",
    });
    it("accepts_valid_meeting", async () => {
      const { hasStructuredPlanningMeeting } = await importPlanQuality();
      expect(hasStructuredPlanningMeeting(makeValidMeeting())).toBe(true);
    });
    it("rejects_missing_meeting", async () => {
      const { hasStructuredPlanningMeeting } = await importPlanQuality();
      expect(hasStructuredPlanningMeeting(null)).toBe(false);
      expect(hasStructuredPlanningMeeting(undefined)).toBe(false);
    });
    it("rejects_invalid_route", async () => {
      const { hasStructuredPlanningMeeting } = await importPlanQuality();
      const meeting = { ...makeValidMeeting(), recommendedRoute: "executing" };
      expect(hasStructuredPlanningMeeting(meeting)).toBe(false);
    });
    it("accepts_researching_route", async () => {
      const { hasStructuredPlanningMeeting } = await importPlanQuality();
      const meeting = { ...makeValidMeeting(), recommendedRoute: "researching" };
      expect(hasStructuredPlanningMeeting(meeting)).toBe(true);
    });
  });
});

View file

@ -0,0 +1,289 @@
/**
* Schedule Store + Types + ULID unit tests.
*
* Purpose: verify the schedule storage layer contracts: append-only writes,
* status-grouping semantics, time-based queries, ULID monotonicity, and
* corrupt-line resilience.
*
* Consumer: CI test runner (vitest).
*/
import assert from "node:assert/strict";
import { mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { afterEach, beforeEach, describe, it } from "vitest";
import {
createScheduleStore,
resolveSchedulePath,
} from "../schedule/schedule-store.js";
import { isValidKind } from "../schedule/schedule-types.js";
import { generateULID } from "../schedule/schedule-ulid.js";
describe("schedule-types", () => {
  describe("isValidKind", () => {
    it("accepts known kinds", () => {
      // Table-driven: every member of the known-kind set must validate.
      const knownKinds = ["reminder", "milestone_check", "review_due", "recurring"];
      for (const kind of knownKinds) {
        assert.equal(isValidKind(kind), true);
      }
    });
    it("rejects unknown kinds", () => {
      // Covers wrong string, empty string, nullish values, and a non-string.
      const invalidInputs = ["unknown", "", null, undefined, 42];
      for (const input of invalidInputs) {
        assert.equal(isValidKind(input), false);
      }
    });
  });
});
describe("schedule-ulid", () => {
  describe("generateULID", () => {
    it("produces 28-character strings starting with 01", () => {
      const ulid = generateULID();
      assert.equal(typeof ulid, "string");
      assert.equal(ulid.length, 28);
      assert.equal(ulid.startsWith("01"), true);
    });
    it("uses only Crockford-Base32 characters", () => {
      const ulid = generateULID();
      // Crockford Base32 excludes I, L, O, U to avoid visual ambiguity.
      const allowed = new Set("0123456789ABCDEFGHJKMNPQRSTVWXYZ");
      const hasOnlyAllowed = [...ulid].every((ch) => allowed.has(ch));
      assert.ok(hasOnlyAllowed, `id ${ulid} contains non-Crockford chars`);
    });
    it("generates unique IDs across many calls", () => {
      const seen = new Set(Array.from({ length: 1000 }, () => generateULID()));
      assert.equal(seen.size, 1000);
    });
    it("is monotonic within the same millisecond", () => {
      // Generate several IDs in rapid succession — likely same ms
      const sequence = Array.from({ length: 20 }, () => generateULID());
      sequence.slice(1).forEach((current, offset) => {
        const previous = sequence[offset];
        assert.ok(
          current > previous,
          `expected ${current} > ${previous} at index ${offset + 1}`,
        );
      });
    });
  });
});
describe("schedule-store", () => {
  /** @type {string} */
  let testDir;
  /** @type {ReturnType<typeof createScheduleStore>} */
  let store;
  beforeEach(() => {
    // Fresh isolated temp dir per test; Date.now() + random suffix avoids
    // collisions between concurrently running test files sharing tmpdir.
    testDir = join(
      tmpdir(),
      `sf-schedule-test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
    );
    mkdirSync(testDir, { recursive: true });
    store = createScheduleStore(testDir);
  });
  afterEach(() => {
    // Best-effort cleanup; a failed rm must not mask the test result.
    try {
      rmSync(testDir, { recursive: true });
    } catch {
      // ignore
    }
  });
  // Minimal valid schedule entry; overrides let each test pin only the
  // fields under test (id / status / due_at / created_at).
  function makeEntry(overrides = {}) {
    const now = new Date().toISOString();
    return {
      id: generateULID(),
      kind: "reminder",
      status: "pending",
      due_at: now,
      created_at: now,
      payload: { message: "test" },
      created_by: "user",
      ...overrides,
    };
  }
  describe("appendEntry + readEntries", () => {
    it("writes an entry and reads it back", () => {
      const entry = makeEntry();
      store.appendEntry("project", entry);
      const entries = store.readEntries("project");
      assert.equal(entries.length, 1);
      assert.equal(entries[0].id, entry.id);
    });
    it("creates the file and directory if missing", () => {
      const entry = makeEntry();
      store.appendEntry("project", entry);
      // _filePathForScope is a test-only escape hatch into the store's layout;
      // readFileSync throws if append didn't create the file, failing the test.
      const filePath = store._filePathForScope("project");
      assert.ok(readFileSync(filePath, "utf-8").includes(entry.id));
    });
    it("appends multiple entries", () => {
      const e1 = makeEntry();
      const e2 = makeEntry();
      store.appendEntry("project", e1);
      store.appendEntry("project", e2);
      const entries = store.readEntries("project");
      assert.equal(entries.length, 2);
    });
    it("groups by id keeping the latest created_at", () => {
      // Append-only log semantics: a re-append with the same id supersedes the
      // earlier record; readEntries must surface only the newest version.
      const id = generateULID();
      const early = makeEntry({
        id,
        status: "pending",
        created_at: "2024-01-01T00:00:00.000Z",
      });
      const late = makeEntry({
        id,
        status: "done",
        created_at: "2024-01-02T00:00:00.000Z",
      });
      store.appendEntry("project", early);
      store.appendEntry("project", late);
      const entries = store.readEntries("project");
      assert.equal(entries.length, 1);
      assert.equal(entries[0].status, "done");
    });
    it("returns empty array for missing file", () => {
      // No appendEntry call: the backing JSONL file never existed.
      const entries = store.readEntries("project");
      assert.deepEqual(entries, []);
    });
  });
  describe("findDue", () => {
    it("returns only pending entries at or before now", () => {
      // Three fixtures: due-and-pending (match), pending-but-future (no match),
      // due-but-done (no match) — only the first should be returned.
      const past = makeEntry({
        due_at: "2024-01-01T00:00:00.000Z",
        status: "pending",
      });
      const future = makeEntry({
        due_at: "2030-01-01T00:00:00.000Z",
        status: "pending",
      });
      const done = makeEntry({
        due_at: "2024-01-01T00:00:00.000Z",
        status: "done",
      });
      store.appendEntry("project", past);
      store.appendEntry("project", future);
      store.appendEntry("project", done);
      const due = store.findDue("project", "2024-06-01T00:00:00.000Z");
      assert.equal(due.length, 1);
      assert.equal(due[0].id, past.id);
    });
    it("sorts results by due_at ascending", () => {
      // Inserted out of order on purpose; findDue must sort, not echo insertion order.
      const e1 = makeEntry({ due_at: "2024-01-02T00:00:00.000Z" });
      const e2 = makeEntry({ due_at: "2024-01-01T00:00:00.000Z" });
      store.appendEntry("project", e1);
      store.appendEntry("project", e2);
      const due = store.findDue("project", "2024-06-01T00:00:00.000Z");
      assert.equal(due[0].id, e2.id);
      assert.equal(due[1].id, e1.id);
    });
  });
  describe("findUpcoming", () => {
    it("returns pending entries within the window", () => {
      // Window is (now, now + 7 days]: an entry due exactly at "now" is
      // excluded (it belongs to findDue), and one 14 days out is beyond it.
      const past = makeEntry({
        due_at: "2024-01-01T00:00:00.000Z",
        status: "pending",
      });
      const soon = makeEntry({
        due_at: "2024-01-02T00:00:00.000Z",
        status: "pending",
      });
      const far = makeEntry({
        due_at: "2024-01-15T00:00:00.000Z",
        status: "pending",
      });
      store.appendEntry("project", past);
      store.appendEntry("project", soon);
      store.appendEntry("project", far);
      const upcoming = store.findUpcoming(
        "project",
        "2024-01-01T00:00:00.000Z",
        7,
      );
      assert.equal(upcoming.length, 1);
      assert.equal(upcoming[0].id, soon.id);
    });
    it("excludes non-pending entries", () => {
      const soonDone = makeEntry({
        due_at: "2024-01-02T00:00:00.000Z",
        status: "done",
      });
      store.appendEntry("project", soonDone);
      const upcoming = store.findUpcoming(
        "project",
        "2024-01-01T00:00:00.000Z",
        7,
      );
      assert.equal(upcoming.length, 0);
    });
    it("sorts results by due_at ascending", () => {
      const e1 = makeEntry({ due_at: "2024-01-03T00:00:00.000Z" });
      const e2 = makeEntry({ due_at: "2024-01-02T00:00:00.000Z" });
      store.appendEntry("project", e1);
      store.appendEntry("project", e2);
      const upcoming = store.findUpcoming(
        "project",
        "2024-01-01T00:00:00.000Z",
        7,
      );
      assert.equal(upcoming[0].id, e2.id);
      assert.equal(upcoming[1].id, e1.id);
    });
  });
  describe("corrupt line handling", () => {
    it("skips corrupt JSONL lines and returns valid entries", () => {
      const entry = makeEntry();
      store.appendEntry("project", entry);
      // Inject a corrupt line directly into the file
      const filePath = store._filePathForScope("project");
      const content = readFileSync(filePath, "utf-8");
      writeFileSync(filePath, content + "this is not json\n", "utf-8");
      // The bad line must be silently skipped, not crash the read.
      const entries = store.readEntries("project");
      assert.equal(entries.length, 1);
      assert.equal(entries[0].id, entry.id);
    });
  });
  describe("resolveSchedulePath", () => {
    it("returns project path for project scope", () => {
      const p = resolveSchedulePath(testDir, "project");
      assert.ok(p.endsWith("schedule.jsonl"));
      assert.ok(p.includes(".sf"));
    });
    it("returns global path for global scope", () => {
      const p = resolveSchedulePath(testDir, "global");
      assert.ok(p.endsWith("schedule.jsonl"));
      // Global scope may live either under a .sf home dir or the OS tmpdir
      // (when HOME is redirected in CI), hence the disjunction.
      assert.ok(p.includes(".sf") || p.startsWith(tmpdir()));
    });
  });
});