fix(self-feedback,state): close two state-drift gaps
1. Self-feedback JSONL is now a real append-only audit log. Previously
markResolved updated the DB row in place but never echoed the
resolution to JSONL, so a DB rebuild via importLegacyJsonlToDb would
re-import all entries with their original pre-resolution state and
silently lose every resolution that had ever landed. The JSONL was a
half an event log — creations yes, resolutions no.
- Introduce a `recordType: "resolution"` JSONL record shape. Append
one of these to the project JSONL whenever markResolved succeeds
against the DB. Best-effort: failure to append never blocks the
resolution itself.
- Extend importLegacyJsonlToDb to handle both record types. Entry
creations go through insertSelfFeedbackEntry (ON CONFLICT DO
NOTHING — idempotent). Resolution events go through
resolveSelfFeedbackEntry, which is already a no-op on missing or
already-resolved rows, so replay is idempotent.
- Tests cover: the appended record shape; a DB rebuild correctly
reconstructing resolved_at/resolved_evidence_json from a JSONL
audit trail; orphan resolution events (entry never existed) are a
silent no-op.
Closes self-feedback entry sf-mp4ikbta-2zcbhh.
2. The reconcile path at state-db.js:reconcileSliceTasks warns when an
on-disk SUMMARY.md exists for a task whose DB row is still pending
and refuses to silently import — a safety check so autonomous runs
can't promote themselves to complete by writing a SUMMARY without a
real DB transition. But operators had no remediation path when the
drift was real (lost DB write, hand edit). They had to mutate the
DB by hand.
- New `state-reconcile.js` with `reconcileTaskFromSummary` exposes
the remediation explicitly. Parses the SUMMARY via the existing
parseSummary helper, validates via isValidTaskSummary, and writes
status / completed_at / verification_result / blocker /
key_files / full_summary_md into the DB row through a new
`setTaskSummaryFields` helper in sf-db-tasks.
- Returns structured { ok, reason, applied } outcomes — never
throws — so operator tooling can branch on `db-unavailable`,
`summary-missing`, `summary-invalid`, `task-not-in-db`,
`already-done`.
- The reconcile warning text now points at the helper.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
5f245b721d
commit
ce58d32231
6 changed files with 527 additions and 4 deletions
|
|
@ -231,16 +231,66 @@ function readJsonl(path) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* Wire-format tag for self-feedback JSONL records.
|
||||||
|
*
|
||||||
|
* Older entries pre-date this tag and have no `recordType` field; readers
|
||||||
|
* treat absence (or "entry") as a regular entry creation. The "resolution"
|
||||||
|
* tag was added so JSONL can carry resolution events as well as entries,
|
||||||
|
* making it a real append-only audit log instead of a half-event-log that
|
||||||
|
* loses resolution history on DB rebuild.
|
||||||
|
*/
|
||||||
|
const SELF_FEEDBACK_RECORD_ENTRY = "entry";
|
||||||
|
const SELF_FEEDBACK_RECORD_RESOLUTION = "resolution";
|
||||||
|
|
||||||
|
function isResolutionRecord(record) {
|
||||||
|
return (
|
||||||
|
record && record.recordType === SELF_FEEDBACK_RECORD_RESOLUTION && record.entryId
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
/**
 * Best-effort import of the project's self-feedback JSONL into the DB.
 *
 * Handles both record shapes in the audit log: entry creations (legacy
 * records and new ones) and `recordType: "resolution"` events. Replay is
 * idempotent — entry inserts use ON CONFLICT DO NOTHING, and
 * resolveSelfFeedbackEntry is a no-op on missing or already-resolved rows.
 */
function importLegacyJsonlToDb(basePath) {
  if (!isDbAvailable()) return;
  const records = readJsonl(projectJsonlPath(basePath));
  for (const record of records) {
    try {
      if (!isResolutionRecord(record)) {
        // Legacy / new entry creation record.
        insertSelfFeedbackEntry(record);
        continue;
      }
      // Resolution event from the audit log. Apply to the existing DB
      // row if it exists and is not already resolved — never clobbers
      // a credible resolution that landed via a different path.
      const {
        resolvedReason: reason,
        resolvedEvidence: evidence,
        resolvedCriteriaMet: criteriaMet,
        resolvedBySfVersion,
        resolvedAt,
      } = record;
      resolveSelfFeedbackEntry(record.entryId, {
        reason,
        evidence,
        criteriaMet,
        resolvedBySfVersion,
        resolvedAt,
      });
    } catch {
      /* non-fatal compatibility import */
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Append a `recordType: "resolution"` event to the project JSONL audit log.
 *
 * Best-effort by design: a failed append is swallowed so the JSONL audit
 * trail can never block a resolution that has already landed in the DB.
 * importLegacyJsonlToDb replays these events into DB rows on rebuild.
 */
function appendResolutionToJsonl(basePath, entryId, resolution, resolvedAt) {
  // Key order is kept stable so serialized records stay diff-friendly.
  const auditRecord = {
    recordType: SELF_FEEDBACK_RECORD_RESOLUTION,
    entryId,
    resolvedAt: resolvedAt ?? new Date().toISOString(),
    resolvedReason: resolution.reason,
    resolvedEvidence: resolution.evidence,
    resolvedCriteriaMet: resolution.criteriaMet,
    resolvedBySfVersion: getCurrentSfVersion(),
  };
  try {
    appendJsonl(projectJsonlPath(basePath), auditRecord);
  } catch {
    /* non-fatal — JSONL audit append must never block resolution */
  }
}
|
||||||
function formatOpenMarkdownRow(entry) {
|
function formatOpenMarkdownRow(entry) {
|
||||||
const unit = formatUnitCell(entry.occurredIn);
|
const unit = formatUnitCell(entry.occurredIn);
|
||||||
const summary = escapeCell(entry.summary);
|
const summary = escapeCell(entry.summary);
|
||||||
|
|
@ -377,11 +427,19 @@ export function markResolved(entryId, resolution, basePath = process.cwd()) {
|
||||||
if (isForgeRepo(basePath) && isDbAvailable()) {
|
if (isForgeRepo(basePath) && isDbAvailable()) {
|
||||||
try {
|
try {
|
||||||
importLegacyJsonlToDb(basePath);
|
importLegacyJsonlToDb(basePath);
|
||||||
|
const resolvedAt = new Date().toISOString();
|
||||||
const mutated = resolveSelfFeedbackEntry(entryId, {
|
const mutated = resolveSelfFeedbackEntry(entryId, {
|
||||||
...resolution,
|
...resolution,
|
||||||
resolvedBySfVersion: getCurrentSfVersion(),
|
resolvedBySfVersion: getCurrentSfVersion(),
|
||||||
|
resolvedAt,
|
||||||
});
|
});
|
||||||
if (mutated) regenerateSelfFeedbackMarkdown(basePath);
|
if (mutated) {
|
||||||
|
// Append a resolution event to the JSONL audit log so the
|
||||||
|
// resolution survives a DB rebuild. importLegacyJsonlToDb knows
|
||||||
|
// how to replay these events into existing DB rows.
|
||||||
|
appendResolutionToJsonl(basePath, entryId, resolution, resolvedAt);
|
||||||
|
regenerateSelfFeedbackMarkdown(basePath);
|
||||||
|
}
|
||||||
return mutated;
|
return mutated;
|
||||||
} catch {
|
} catch {
|
||||||
/* fall through to legacy JSONL */
|
/* fall through to legacy JSONL */
|
||||||
|
|
|
||||||
|
|
@ -376,6 +376,56 @@ export function setTaskSummaryMd(milestoneId, sliceId, taskId, md) {
|
||||||
.run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId, ":md": md });
|
.run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId, ":md": md });
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * Apply on-disk SUMMARY.md frontmatter and body to the DB task row.
 *
 * Purpose: operator-invoked remediation when the DB-driven reconcile refuses
 * to silently import disk state (state-db.js:reconcileSliceTasks). Writes
 * status, completed_at, verification_result, key_files (as JSON), blocker
 * flag, and the full markdown body in a single UPDATE so the row matches the
 * on-disk SUMMARY shape.
 *
 * Consumer: state-reconcile.js: reconcileTaskFromSummary.
 */
export function setTaskSummaryFields(
  milestoneId,
  sliceId,
  taskId,
  { status, completedAt, verificationResult, blockerDiscovered, keyFiles, summaryMd },
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");

  // Normalize optional fields up front: nullable timestamp, empty-string
  // text defaults, 0/1 boolean flag, key_files serialized as a JSON array.
  const bindings = {
    ":mid": milestoneId,
    ":sid": sliceId,
    ":tid": taskId,
    ":status": status,
    ":completed_at": completedAt ?? null,
    ":verification_result": verificationResult ?? "",
    ":blocker_discovered": blockerDiscovered ? 1 : 0,
    ":key_files": JSON.stringify(keyFiles ?? []),
    ":summary_md": summaryMd ?? "",
  };

  const update = db.prepare(
    `UPDATE tasks SET
       status = :status,
       completed_at = :completed_at,
       verification_result = :verification_result,
       blocker_discovered = :blocker_discovered,
       key_files = :key_files,
       full_summary_md = :summary_md
     WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`,
  );
  update.run(bindings);
}
|
||||||
|
|
||||||
export function getActiveTaskFromDb(milestoneId, sliceId) {
|
export function getActiveTaskFromDb(milestoneId, sliceId) {
|
||||||
const currentDb = _getAdapter();
|
const currentDb = _getAdapter();
|
||||||
if (!currentDb) return null;
|
if (!currentDb) return null;
|
||||||
|
|
|
||||||
|
|
@ -428,7 +428,7 @@ async function reconcileSliceTasks(basePath, milestoneId, sliceId, planFile) {
|
||||||
if (summaryPath && existsSync(summaryPath)) {
|
if (summaryPath && existsSync(summaryPath)) {
|
||||||
logWarning(
|
logWarning(
|
||||||
"reconcile",
|
"reconcile",
|
||||||
`task ${milestoneId}/${sliceId}/${t.id} has SUMMARY on disk but DB status is "${t.status}"; refusing runtime status import`,
|
`task ${milestoneId}/${sliceId}/${t.id} has SUMMARY on disk but DB status is "${t.status}"; refusing runtime status import. Run reconcileTaskFromSummary() from ./state-reconcile.js to apply the on-disk SUMMARY into the DB row explicitly.`,
|
||||||
{ mid: milestoneId, sid: sliceId, tid: t.id },
|
{ mid: milestoneId, sid: sliceId, tid: t.id },
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
90
src/resources/extensions/sf/state-reconcile.js
Normal file
90
src/resources/extensions/sf/state-reconcile.js
Normal file
|
|
@ -0,0 +1,90 @@
|
||||||
|
// SF Extension — Operator-invoked reconcile remediation
|
||||||
|
//
|
||||||
|
// The DB-driven reconcile path (state-db.js: reconcileSliceTasks) refuses to
|
||||||
|
// silently import on-disk SUMMARY metadata when the DB row says `pending` —
|
||||||
|
// that safety check exists so an autonomous run cannot accidentally promote
|
||||||
|
// itself to "complete" by writing a SUMMARY without a real DB transition.
|
||||||
|
//
|
||||||
|
// But the refusal leaves operators with no path forward when the drift is
|
||||||
|
// real (autonomous run completed a task but the DB write was lost, or the
|
||||||
|
// state was hand-edited). This module is the explicit remediation surface:
|
||||||
|
// parse the on-disk SUMMARY, validate it, and write its fields into the DB
|
||||||
|
// row — operator action only, never auto-triggered by reconcile.
|
||||||
|
|
||||||
|
import { existsSync, readFileSync } from "node:fs";
|
||||||
|
import { isValidTaskSummary, parseSummary } from "./files.js";
|
||||||
|
import { resolveTaskFile } from "./paths.js";
|
||||||
|
import {
|
||||||
|
getTask,
|
||||||
|
isDbAvailable,
|
||||||
|
setTaskSummaryFields,
|
||||||
|
} from "./sf-db.js";
|
||||||
|
|
||||||
|
/**
 * Apply an on-disk task SUMMARY.md to the DB row.
 *
 * Returns one of:
 *   { ok: true, applied: <fields-applied> }
 *   { ok: false, reason: <code>, detail?: <string> }
 *
 * Reason codes (none throw — operator tooling needs structured outcomes):
 *   `db-unavailable` — sf.db is not open / not initialized
 *   `summary-missing` — no SUMMARY.md at the resolved task path (or the
 *                       file became unreadable between the existence check
 *                       and the read)
 *   `summary-invalid` — file exists but isValidTaskSummary rejected it
 *   `task-not-in-db` — no DB row for milestoneId/sliceId/taskId
 *   `already-done` — DB row is already in a terminal status; no-op
 */
export function reconcileTaskFromSummary(
  basePath,
  milestoneId,
  sliceId,
  taskId,
) {
  if (!isDbAvailable()) {
    return { ok: false, reason: "db-unavailable" };
  }

  const summaryPath = resolveTaskFile(
    basePath,
    milestoneId,
    sliceId,
    taskId,
    "SUMMARY",
  );
  if (!summaryPath || !existsSync(summaryPath)) {
    return { ok: false, reason: "summary-missing" };
  }

  // The file can vanish or become unreadable between existsSync and the
  // read (TOCTOU / permissions). The contract above promises we never
  // throw, so map a failed read onto the same structured outcome as a
  // missing file instead of letting readFileSync escape.
  let content;
  try {
    content = readFileSync(summaryPath, "utf-8");
  } catch (err) {
    return {
      ok: false,
      reason: "summary-missing",
      detail: `${summaryPath} could not be read: ${err?.message ?? err}`,
    };
  }

  if (!isValidTaskSummary(content)) {
    return {
      ok: false,
      reason: "summary-invalid",
      detail: `${summaryPath} exists but failed isValidTaskSummary`,
    };
  }

  const existing = getTask(milestoneId, sliceId, taskId);
  if (!existing) {
    return { ok: false, reason: "task-not-in-db" };
  }
  // Terminal rows are never overwritten — this remediation exists for
  // pending rows that drifted behind disk, not for rewriting history.
  if (existing.status === "complete" || existing.status === "done") {
    return { ok: false, reason: "already-done", detail: existing.status };
  }

  const parsed = parseSummary(content);
  const fm = parsed.frontmatter ?? {};

  const fields = {
    status: "complete",
    // Frontmatter wins; fall back to "now" so the row always gets a stamp.
    completedAt: fm.completed_at || new Date().toISOString(),
    verificationResult: fm.verification_result || "passed",
    blockerDiscovered: !!fm.blocker_discovered,
    keyFiles: Array.isArray(fm.key_files) ? fm.key_files : [],
    summaryMd: content,
  };

  setTaskSummaryFields(milestoneId, sliceId, taskId, fields);

  return { ok: true, applied: fields };
}
|
||||||
|
|
@ -140,6 +140,112 @@ test("markResolved_when_db_available_updates_sqlite_and_markdown_projection", ()
|
||||||
assert.match(markdown, /Recently Resolved/);
|
assert.match(markdown, /Recently Resolved/);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("markResolved_appends_resolution_event_to_jsonl_audit_log", () => {
  const project = makeForgeProject();
  const recorded = recordSelfFeedback(
    {
      kind: "audit-trail",
      severity: "high",
      summary: "Resolution must land in JSONL too",
    },
    project,
  );
  assert.ok(recorded?.entry.id);

  // Seed an entry into JSONL so the audit log has both records (creation +
  // resolution). recordSelfFeedback skips JSONL when DB is available, so we
  // write the entry-creation record manually to model a real audit trail.
  const jsonlPath = join(project, ".sf", "self-feedback.jsonl");
  writeFileSync(jsonlPath, `${JSON.stringify(recorded.entry)}\n`);

  const resolution = {
    reason: "fixed by 0123abc",
    evidence: { kind: "agent-fix", commitSha: "0123abc456789" },
  };
  assert.equal(markResolved(recorded.entry.id, resolution, project), true);

  const auditLines = readFileSync(jsonlPath, "utf-8")
    .split("\n")
    .filter((line) => line.trim());
  assert.equal(auditLines.length, 2);

  // Second line must be the resolution event, carrying the full payload.
  const event = JSON.parse(auditLines[1]);
  assert.equal(event.recordType, "resolution");
  assert.equal(event.entryId, recorded.entry.id);
  assert.equal(event.resolvedReason, "fixed by 0123abc");
  assert.equal(event.resolvedEvidence.kind, "agent-fix");
  assert.equal(event.resolvedEvidence.commitSha, "0123abc456789");
  assert.ok(event.resolvedAt);
});
|
||||||
|
|
||||||
|
test("importLegacyJsonlToDb_replays_resolution_events_onto_rebuilt_db", () => {
  const project = makeForgeProject();

  // Build a JSONL audit log with: entry creation + resolution event. This
  // is the shape an existing JSONL will have after markResolved runs.
  const creation = {
    schemaVersion: 1,
    id: "rebuild-1",
    ts: "2026-05-13T10:00:00.000Z",
    kind: "gap:foo",
    severity: "medium",
    blocking: false,
    summary: "Entry from before DB rebuild",
    repoIdentity: "forge",
    sfVersion: "2.75.3",
    basePath: project,
  };
  const resolutionEvent = {
    recordType: "resolution",
    entryId: "rebuild-1",
    resolvedAt: "2026-05-13T11:30:00.000Z",
    resolvedReason: "fixed by sha=def456",
    resolvedEvidence: { kind: "agent-fix", commitSha: "def456" },
    resolvedBySfVersion: "2.75.3",
  };
  writeFileSync(
    join(project, ".sf", "self-feedback.jsonl"),
    `${JSON.stringify(creation)}\n${JSON.stringify(resolutionEvent)}\n`,
  );

  // Simulate a DB rebuild: trigger import via readAllSelfFeedback.
  const entries = readAllSelfFeedback(project);

  assert.equal(entries.length, 1);
  const [reloaded] = entries;
  assert.equal(reloaded.id, "rebuild-1");
  assert.equal(reloaded.resolvedAt, "2026-05-13T11:30:00.000Z");
  assert.equal(reloaded.resolvedReason, "fixed by sha=def456");
  assert.deepEqual(reloaded.resolvedEvidence, {
    kind: "agent-fix",
    commitSha: "def456",
  });
});
|
||||||
|
|
||||||
|
test("importLegacyJsonlToDb_resolution_event_for_missing_entry_is_a_noop", () => {
  const project = makeForgeProject();

  // A resolution event whose target entry was never created — stale audit
  // history. Replay must silently skip it.
  const orphanEvent = {
    recordType: "resolution",
    entryId: "never-existed",
    resolvedAt: "2026-05-13T11:30:00.000Z",
    resolvedReason: "stale audit log entry",
    resolvedEvidence: { kind: "agent-fix", commitSha: "abc" },
  };
  writeFileSync(
    join(project, ".sf", "self-feedback.jsonl"),
    `${JSON.stringify(orphanEvent)}\n`,
  );

  // Should not throw, should not insert anything.
  assert.equal(readAllSelfFeedback(project).length, 0);
});
|
||||||
|
|
||||||
test("compactSelfFeedbackMarkdown_when_projection_stale_rewrites_from_sqlite", () => {
|
test("compactSelfFeedbackMarkdown_when_projection_stale_rewrites_from_sqlite", () => {
|
||||||
const project = makeForgeProject();
|
const project = makeForgeProject();
|
||||||
const result = recordSelfFeedback(
|
const result = recordSelfFeedback(
|
||||||
|
|
|
||||||
219
src/resources/extensions/sf/tests/state-reconcile.test.mjs
Normal file
219
src/resources/extensions/sf/tests/state-reconcile.test.mjs
Normal file
|
|
@ -0,0 +1,219 @@
|
||||||
|
/**
|
||||||
|
* state-reconcile.test.mjs — reconcileTaskFromSummary remediation path.
|
||||||
|
*
|
||||||
|
* Purpose: prove the operator-invoked remediation correctly applies on-disk
|
||||||
|
* SUMMARY metadata into a pending DB row, and returns structured outcomes
|
||||||
|
* for the cases where it should NOT apply.
|
||||||
|
*/
|
||||||
|
import {
|
||||||
|
mkdirSync,
|
||||||
|
mkdtempSync,
|
||||||
|
rmSync,
|
||||||
|
writeFileSync,
|
||||||
|
} from "node:fs";
|
||||||
|
import { tmpdir } from "node:os";
|
||||||
|
import { join } from "node:path";
|
||||||
|
import { afterEach, describe, expect, test } from "vitest";
|
||||||
|
import { reconcileTaskFromSummary } from "../state-reconcile.js";
|
||||||
|
import {
|
||||||
|
closeDatabase,
|
||||||
|
getTask,
|
||||||
|
insertMilestone,
|
||||||
|
insertSlice,
|
||||||
|
insertTask,
|
||||||
|
openDatabase,
|
||||||
|
} from "../sf-db.js";
|
||||||
|
|
||||||
|
// Temp project directories created by makeForgeProject; drained in afterEach.
const tmpDirs = [];
|
||||||
|
|
||||||
|
// Close the database before removing the temp projects that back it, then
// drain tmpDirs so no directory leaks into the next test.
afterEach(() => {
  closeDatabase();
  for (const dir of tmpDirs.splice(0)) {
    rmSync(dir, { recursive: true, force: true });
  }
});
|
||||||
|
|
||||||
|
// Create a throwaway forge-shaped project: a .sf/ directory, a package.json
// naming the repo "singularity-forge", and a freshly opened sf.db. The dir
// is registered in tmpDirs so afterEach cleans it up.
function makeForgeProject() {
  const projectDir = mkdtempSync(join(tmpdir(), "sf-state-reconcile-"));
  tmpDirs.push(projectDir);

  const sfDir = join(projectDir, ".sf");
  mkdirSync(sfDir, { recursive: true });

  const manifest = JSON.stringify({ name: "singularity-forge" });
  writeFileSync(join(projectDir, "package.json"), manifest);

  openDatabase(join(sfDir, "sf.db"));
  return projectDir;
}
|
||||||
|
|
||||||
|
// Write a `<taskId>-SUMMARY.md` under the on-disk milestone layout
// (.sf/milestones/<m>/slices/<s>/tasks/), creating directories as needed.
function writeTaskSummary(dir, milestoneId, sliceId, taskId, content) {
  const taskDir = join(
    dir,
    ".sf",
    "milestones",
    milestoneId,
    "slices",
    sliceId,
    "tasks",
  );
  mkdirSync(taskDir, { recursive: true });
  writeFileSync(join(taskDir, `${taskId}-SUMMARY.md`), content);
}
|
||||||
|
|
||||||
|
// Seed milestone → slice → pending task rows so reconcile has a DB target.
function seedPendingTask(milestoneId, sliceId, taskId) {
  insertMilestone({ id: milestoneId, title: milestoneId, status: "active" });

  const sliceRow = {
    milestoneId,
    id: sliceId,
    title: sliceId,
    status: "active",
    risk: "medium",
    sequence: 1,
  };
  insertSlice(sliceRow);

  const taskRow = {
    milestoneId,
    sliceId,
    id: taskId,
    title: `Task ${taskId}`,
    status: "pending",
    description: "",
    estimate: "",
    files: [],
    sequence: 1,
  };
  insertTask(taskRow);
}
|
||||||
|
|
||||||
|
const VALID_SUMMARY = `---
|
||||||
|
id: T02
|
||||||
|
parent: S05
|
||||||
|
milestone: M001-6377a4
|
||||||
|
key_files:
|
||||||
|
- docs/dev/drafts/sf-ace-patterns.md
|
||||||
|
key_decisions:
|
||||||
|
- (none)
|
||||||
|
duration:
|
||||||
|
verification_result: passed
|
||||||
|
completed_at: 2026-05-13T00:05:40.130Z
|
||||||
|
blocker_discovered: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# T02: Finalized ACE Compatibility Reference Document
|
||||||
|
|
||||||
|
**ACE-compatible patterns reference complete.**
|
||||||
|
|
||||||
|
## What Happened
|
||||||
|
|
||||||
|
The doc was authored, audited, and parked under drafts/.
|
||||||
|
`;
|
||||||
|
|
||||||
|
describe("reconcileTaskFromSummary", () => {
  // Happy path: pending row + valid SUMMARY → fields land in the DB row.
  test("applies frontmatter + body to a pending task row", () => {
    const dir = makeForgeProject();
    seedPendingTask("M001-6377a4", "S05", "T02");
    writeTaskSummary(dir, "M001-6377a4", "S05", "T02", VALID_SUMMARY);

    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");

    expect(result.ok).toBe(true);
    expect(result.applied.status).toBe("complete");
    expect(result.applied.completedAt).toBe("2026-05-13T00:05:40.130Z");
    expect(result.applied.verificationResult).toBe("passed");
    expect(result.applied.blockerDiscovered).toBe(false);
    expect(result.applied.keyFiles).toEqual([
      "docs/dev/drafts/sf-ace-patterns.md",
    ]);

    const updated = getTask("M001-6377a4", "S05", "T02");
    expect(updated.status).toBe("complete");
    expect(updated.completed_at).toBe("2026-05-13T00:05:40.130Z");
    expect(updated.verification_result).toBe("passed");
    // rowToTask parses the key_files JSON column into an array.
    expect(updated.key_files).toEqual([
      "docs/dev/drafts/sf-ace-patterns.md",
    ]);
    expect(updated.full_summary_md).toContain("Finalized ACE Compatibility");
  });

  test("returns summary-missing when no SUMMARY on disk", () => {
    const dir = makeForgeProject();
    seedPendingTask("M001-6377a4", "S05", "T02");

    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");
    expect(result).toEqual({ ok: false, reason: "summary-missing" });
  });

  test("returns task-not-in-db when row does not exist", () => {
    const dir = makeForgeProject();
    writeTaskSummary(dir, "M001-6377a4", "S05", "T02", VALID_SUMMARY);

    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");
    expect(result).toEqual({ ok: false, reason: "task-not-in-db" });
  });

  test("returns summary-invalid when file is empty/whitespace", () => {
    const dir = makeForgeProject();
    seedPendingTask("M001-6377a4", "S05", "T02");
    writeTaskSummary(dir, "M001-6377a4", "S05", "T02", " \n ");

    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");
    expect(result.ok).toBe(false);
    expect(result.reason).toBe("summary-invalid");
  });

  // Terminal rows are never overwritten; seeded directly (not via
  // seedPendingTask) because the row must start out "complete".
  test("returns already-done when task is already complete (no-op)", () => {
    const dir = makeForgeProject();
    insertMilestone({ id: "M001-6377a4", title: "M001", status: "active" });
    insertSlice({
      milestoneId: "M001-6377a4",
      id: "S05",
      title: "S05",
      status: "active",
      risk: "medium",
      sequence: 1,
    });
    insertTask({
      milestoneId: "M001-6377a4",
      sliceId: "S05",
      id: "T02",
      title: "Already done",
      status: "complete",
      description: "",
      estimate: "",
      files: [],
      sequence: 1,
    });
    writeTaskSummary(dir, "M001-6377a4", "S05", "T02", VALID_SUMMARY);

    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");
    expect(result.ok).toBe(false);
    expect(result.reason).toBe("already-done");
  });

  // When frontmatter omits completed_at, the helper stamps "now"; bracket
  // the call with Date.now() to assert the default without pinning a value.
  test("defaults completedAt to now when frontmatter is missing it", () => {
    const dir = makeForgeProject();
    seedPendingTask("M001-6377a4", "S05", "T02");
    const minimal = `---
id: T02
verification_result: passed
---

# T02: Done

**Done.**

## What Happened

Done.
`;
    writeTaskSummary(dir, "M001-6377a4", "S05", "T02", minimal);

    const before = Date.now();
    const result = reconcileTaskFromSummary(dir, "M001-6377a4", "S05", "T02");
    const after = Date.now();

    expect(result.ok).toBe(true);
    const t = new Date(result.applied.completedAt).getTime();
    expect(t).toBeGreaterThanOrEqual(before);
    expect(t).toBeLessThanOrEqual(after);
  });
});
|
||||||
Loading…
Add table
Reference in a new issue