refactor(db-first): migrate triage outputs and runtime counters to sf.db
- sf-db.js v52: triage_runs/evals/items/skills, runtime_counters, validation_attention_markers tables + CRUD functions
- commands-todo.js: write triage evals/items/skills to DB instead of JSONL; keep markdown report as a human artifact
- auto-dispatch.js: rewrite-count + uat-count use the runtime_counters table with file fallback; validation attention markers use the DB with file fallback
- migration test: bump expected schema version 51 → 52
- jsonl-schema-versioning.test.mjs: update the triage test to assert DB rows

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
parent
3b249c4144
commit
a70004cf2a
5 changed files with 350 additions and 44 deletions
|
|
@ -81,10 +81,15 @@ import {
|
|||
getMilestoneSlices,
|
||||
getMilestoneValidationAssessment,
|
||||
getPendingGates,
|
||||
getRuntimeCounter,
|
||||
getSlice,
|
||||
getSliceTasks,
|
||||
getValidationAttentionMarker,
|
||||
incrementRuntimeCounter,
|
||||
isDbAvailable,
|
||||
markAllGatesOmitted,
|
||||
setRuntimeCounter,
|
||||
upsertValidationAttentionMarker,
|
||||
} from "./sf-db.js";
|
||||
import { isClosedStatus, isInactiveStatus } from "./status-guards.js";
|
||||
import { buildAuditEnvelope, emitUokAuditEvent } from "./uok/audit.js";
|
||||
|
|
@ -270,6 +275,7 @@ function rewriteCountPath(basePath) {
|
|||
return join(sfRoot(basePath), "runtime", "rewrite-count.json");
|
||||
}
|
||||
export function getRewriteCount(basePath) {
|
||||
if (isDbAvailable()) return getRuntimeCounter("rewrite-count");
|
||||
try {
|
||||
const data = JSON.parse(readFileSync(rewriteCountPath(basePath), "utf-8"));
|
||||
return typeof data.count === "number" ? data.count : 0;
|
||||
|
|
@ -278,6 +284,10 @@ export function getRewriteCount(basePath) {
|
|||
}
|
||||
}
|
||||
export function setRewriteCount(basePath, count) {
|
||||
if (isDbAvailable()) {
|
||||
setRuntimeCounter("rewrite-count", count);
|
||||
return;
|
||||
}
|
||||
const filePath = rewriteCountPath(basePath);
|
||||
mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true });
|
||||
writeFileSync(
|
||||
|
|
@ -293,6 +303,8 @@ function uatCountPath(basePath, mid, sid) {
|
|||
return join(sfRoot(basePath), "runtime", `uat-count-${mid}-${sid}.json`);
|
||||
}
|
||||
export function getUatCount(basePath, mid, sid) {
|
||||
const key = `uat-count:${mid}:${sid}`;
|
||||
if (isDbAvailable()) return getRuntimeCounter(key);
|
||||
try {
|
||||
const data = JSON.parse(
|
||||
readFileSync(uatCountPath(basePath, mid, sid), "utf-8"),
|
||||
|
|
@ -303,6 +315,8 @@ export function getUatCount(basePath, mid, sid) {
|
|||
}
|
||||
}
|
||||
export function incrementUatCount(basePath, mid, sid) {
|
||||
const key = `uat-count:${mid}:${sid}`;
|
||||
if (isDbAvailable()) return incrementRuntimeCounter(key);
|
||||
const count = getUatCount(basePath, mid, sid) + 1;
|
||||
const filePath = uatCountPath(basePath, mid, sid);
|
||||
mkdirSync(join(sfRoot(basePath), "runtime"), { recursive: true });
|
||||
|
|
@ -360,6 +374,7 @@ function parseValidationRemediationRound(content) {
|
|||
return Number.isFinite(round) ? round : null;
|
||||
}
|
||||
function readValidationAttentionMarker(basePath, mid) {
|
||||
if (isDbAvailable()) return getValidationAttentionMarker(mid);
|
||||
const markerPath = validationAttentionMarkerPath(basePath, mid);
|
||||
if (!existsSync(markerPath)) return null;
|
||||
try {
|
||||
|
|
@ -371,6 +386,10 @@ function readValidationAttentionMarker(basePath, mid) {
|
|||
}
|
||||
}
|
||||
function writeValidationAttentionMarker(basePath, mid, marker) {
|
||||
if (isDbAvailable()) {
|
||||
upsertValidationAttentionMarker(mid, marker);
|
||||
return;
|
||||
}
|
||||
mkdirSync(join(sfRoot(basePath), "runtime", "validation-attention"), {
|
||||
recursive: true,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -19,7 +19,14 @@ import {
|
|||
import { dirname, join } from "node:path";
|
||||
import { projectRoot } from "./commands/context.js";
|
||||
import { sfRoot } from "./paths.js";
|
||||
import { addBacklogItem, openDatabase } from "./sf-db.js";
|
||||
import {
|
||||
addBacklogItem,
|
||||
insertTriageEval,
|
||||
insertTriageItem,
|
||||
insertTriageRun,
|
||||
insertTriageSkill,
|
||||
openDatabase,
|
||||
} from "./sf-db.js";
|
||||
|
||||
const _EMPTY_TODO = "# TODO\n\nDump anything here.\n";
|
||||
const MAX_DUMP_CHARS = 48_000;
|
||||
|
|
@ -546,33 +553,23 @@ export async function triageTodoDump(basePath, llmCall, options = {}) {
|
|||
const createdAt = (options.date ?? new Date()).toISOString();
|
||||
const triageRoot = join(basePath, ".sf", "triage");
|
||||
const reportsDir = join(triageRoot, "reports");
|
||||
const evalsDir = join(triageRoot, "evals");
|
||||
const inboxDir = join(triageRoot, "inbox");
|
||||
const skillsDir = join(triageRoot, "skills");
|
||||
mkdirSync(reportsDir, { recursive: true });
|
||||
mkdirSync(evalsDir, { recursive: true });
|
||||
mkdirSync(inboxDir, { recursive: true });
|
||||
mkdirSync(skillsDir, { recursive: true });
|
||||
const markdownPath = join(reportsDir, `${id}.md`);
|
||||
const evalJsonlPath = join(evalsDir, `${id}.evals.jsonl`);
|
||||
const normalizedJsonlPath = join(inboxDir, `${id}.jsonl`);
|
||||
const skillJsonlPath = join(skillsDir, `${id}.skills.jsonl`);
|
||||
writeFileSync(markdownPath, renderTriageMarkdown(result, "TODO.md"));
|
||||
writeFileSync(evalJsonlPath, renderEvalJsonl(result));
|
||||
writeFileSync(normalizedJsonlPath, renderNormalizedJsonl(result, createdAt));
|
||||
writeFileSync(skillJsonlPath, renderSkillProposals(result));
|
||||
// Schema validation in CI mode
|
||||
if (options.ci) {
|
||||
const validations = [
|
||||
validateJsonlFile(evalJsonlPath, "eval"),
|
||||
validateJsonlFile(normalizedJsonlPath, "inbox"),
|
||||
validateJsonlFile(skillJsonlPath, "skill"),
|
||||
];
|
||||
for (const v of validations) {
|
||||
if (!v.ok) {
|
||||
throw new Error(`Schema validation failed for ${v.error}`);
|
||||
}
|
||||
}
|
||||
// Write triage results to DB (replaces JSONL files)
|
||||
const dbRoot = sfRoot(basePath);
|
||||
mkdirSync(dbRoot, { recursive: true });
|
||||
openDatabase(join(dbRoot, "sf.db"));
|
||||
insertTriageRun(id, join(basePath, "TODO.md"), createdAt);
|
||||
for (const item of result.eval_candidates) {
|
||||
insertTriageEval(crypto.randomUUID(), id, item, createdAt);
|
||||
}
|
||||
for (const item of normalizedItems(result, createdAt)) {
|
||||
insertTriageItem(crypto.randomUUID(), id, item.kind, item.content, item.evidence, createdAt);
|
||||
}
|
||||
const skillProposals = detectRecurringPatterns(result);
|
||||
for (const skill of skillProposals) {
|
||||
insertTriageSkill(crypto.randomUUID(), id, skill, createdAt);
|
||||
}
|
||||
const backlogItemsAdded =
|
||||
backlog === true
|
||||
|
|
@ -587,9 +584,9 @@ export async function triageTodoDump(basePath, llmCall, options = {}) {
|
|||
}
|
||||
return {
|
||||
markdownPath,
|
||||
evalJsonlPath,
|
||||
normalizedJsonlPath,
|
||||
skillJsonlPath,
|
||||
evalJsonlPath: null,
|
||||
normalizedJsonlPath: null,
|
||||
skillJsonlPath: null,
|
||||
backlogItemsAdded,
|
||||
result,
|
||||
skipped: false,
|
||||
|
|
|
|||
|
|
@ -244,7 +244,7 @@ function performDatabaseMaintenance(rawDb, path) {
|
|||
);
|
||||
}
|
||||
}
|
||||
const SCHEMA_VERSION = 49;
|
||||
const SCHEMA_VERSION = 52;
|
||||
function indexExists(db, name) {
|
||||
return !!db
|
||||
.prepare(
|
||||
|
|
@ -724,6 +724,82 @@ function ensureRetrievalEvidenceTables(db) {
|
|||
"CREATE INDEX IF NOT EXISTS idx_retrieval_evidence_status_recorded ON retrieval_evidence(status, recorded_at DESC)",
|
||||
);
|
||||
}
|
||||
/**
 * Create the triage tables and their indexes if they do not exist.
 * Purpose: DB-backed storage for triage runs, eval candidates, inbox items,
 * and skill proposals (replacing the former .sf/triage JSONL files).
 * All statements are idempotent (IF NOT EXISTS), so this is safe on every open.
 */
function ensureTriageTables(db) {
  const statements = [
    `
    CREATE TABLE IF NOT EXISTS triage_runs (
      id TEXT PRIMARY KEY,
      source_file TEXT,
      status TEXT NOT NULL DEFAULT 'complete',
      result_summary_json TEXT,
      created_at TEXT NOT NULL
    )
  `,
    `
    CREATE TABLE IF NOT EXISTS triage_evals (
      id TEXT PRIMARY KEY,
      run_id TEXT NOT NULL REFERENCES triage_runs(id),
      task_input TEXT NOT NULL,
      expected_behavior TEXT,
      evidence TEXT,
      failure_mode TEXT,
      status TEXT NOT NULL DEFAULT 'pending',
      created_at TEXT NOT NULL
    )
  `,
    `
    CREATE TABLE IF NOT EXISTS triage_items (
      id TEXT PRIMARY KEY,
      run_id TEXT NOT NULL REFERENCES triage_runs(id),
      kind TEXT NOT NULL,
      content TEXT NOT NULL,
      evidence TEXT,
      status TEXT NOT NULL DEFAULT 'pending',
      created_at TEXT NOT NULL
    )
  `,
    `
    CREATE TABLE IF NOT EXISTS triage_skills (
      id TEXT PRIMARY KEY,
      run_id TEXT NOT NULL REFERENCES triage_runs(id),
      name TEXT,
      description TEXT,
      trigger TEXT,
      raw_json TEXT,
      status TEXT NOT NULL DEFAULT 'pending',
      created_at TEXT NOT NULL
    )
  `,
    // Lookup indexes: triage rows are always queried per run (and per kind for items).
    "CREATE INDEX IF NOT EXISTS idx_triage_evals_run ON triage_evals(run_id)",
    "CREATE INDEX IF NOT EXISTS idx_triage_items_run_kind ON triage_items(run_id, kind)",
    "CREATE INDEX IF NOT EXISTS idx_triage_skills_run ON triage_skills(run_id)",
  ];
  for (const sql of statements) {
    db.exec(sql);
  }
}
|
||||
/**
 * Create the runtime_counters key/value table if it does not exist.
 * Purpose: replaces the per-counter JSON files under .sf/runtime/ with a
 * single keyed table (value defaults to 0). Idempotent via IF NOT EXISTS.
 */
function ensureRuntimeCounterTable(db) {
  const ddl = `
    CREATE TABLE IF NOT EXISTS runtime_counters (
      key TEXT PRIMARY KEY,
      value INTEGER NOT NULL DEFAULT 0,
      updated_at TEXT NOT NULL
    )
  `;
  db.exec(ddl);
}
|
||||
/**
 * Create the validation_attention_markers table if it does not exist.
 * Purpose: replaces .sf/runtime/validation-attention/{mid}.json files; one
 * row per milestone (milestone_id is the primary key). Idempotent.
 */
function ensureValidationAttentionMarkersTable(db) {
  const ddl = `
    CREATE TABLE IF NOT EXISTS validation_attention_markers (
      milestone_id TEXT PRIMARY KEY,
      created_at TEXT NOT NULL,
      source TEXT,
      remediation_round INTEGER,
      revalidation_round INTEGER,
      revalidation_requested_at TEXT
    )
  `;
  db.exec(ddl);
}
|
||||
function ensureSpecSchemaTables(db) {
|
||||
// Tier 1.3: Spec/Runtime/Evidence schema separation
|
||||
// Creates 9 normalized tables for milestone, slice, task entities
|
||||
|
|
@ -1376,6 +1452,9 @@ function initSchema(db, fileBacked) {
|
|||
ensureSpecSchemaTables(db);
|
||||
ensureTaskFrontmatterColumns(db);
|
||||
ensureRetrievalEvidenceTables(db);
|
||||
ensureTriageTables(db);
|
||||
ensureRuntimeCounterTable(db);
|
||||
ensureValidationAttentionMarkersTable(db);
|
||||
db.exec(
|
||||
`CREATE VIEW IF NOT EXISTS active_decisions AS SELECT * FROM decisions WHERE superseded_by IS NULL`,
|
||||
);
|
||||
|
|
@ -3010,6 +3089,19 @@ function migrateSchema(db) {
|
|||
":applied_at": new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
if (currentVersion < 52) {
|
||||
// Add triage_runs/evals/items/skills, runtime_counters, and
|
||||
// validation_attention_markers tables — migrate JSONL structured state to DB.
|
||||
ensureTriageTables(db);
|
||||
ensureRuntimeCounterTable(db);
|
||||
ensureValidationAttentionMarkersTable(db);
|
||||
db.prepare(
|
||||
"INSERT INTO schema_version (version, applied_at) VALUES (:version, :applied_at)",
|
||||
).run({
|
||||
":version": 52,
|
||||
":applied_at": new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
db.exec("COMMIT");
|
||||
} catch (err) {
|
||||
db.exec("ROLLBACK");
|
||||
|
|
@ -8047,3 +8139,198 @@ export function getValidationHistory(milestoneId, sliceId, taskId, limit = 20) {
|
|||
":limit": limit,
|
||||
});
|
||||
}
|
||||
|
||||
// ─── Triage DB CRUD ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Insert a triage run record (no-op when the id already exists).
 * Purpose: replace .sf/triage/evals|inbox|skills JSONL files with queryable DB rows.
 * Consumer: commands-todo.js triageTodoDump after successful triage.
 * @param {string} id - Triage run identifier (primary key).
 * @param {string|null|undefined} sourceFile - Path of the triaged file; stored as NULL when absent.
 * @param {string|null|undefined} createdAt - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageRun(id, sourceFile, createdAt) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO triage_runs (id, source_file, status, created_at)
     VALUES (:id, :source_file, 'complete', :created_at)
     ON CONFLICT(id) DO NOTHING`,
  );
  const params = {
    ":id": id,
    ":source_file": sourceFile ?? null,
    ":created_at": createdAt ?? new Date().toISOString(),
  };
  stmt.run(params);
}
|
||||
|
||||
/**
 * Insert a triage eval candidate row (no-op when the id already exists).
 * Purpose: store eval candidates in DB instead of .evals.jsonl.
 * Consumer: commands-todo.js triageTodoDump.
 * @param {string} id - Row identifier (primary key).
 * @param {string} runId - Owning triage_runs id.
 * @param {object} data - Eval candidate; task_input/expected_behavior default to "",
 *   evidence/failure_mode to NULL when absent.
 * @param {string|null|undefined} createdAt - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageEval(id, runId, data, createdAt) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const { task_input, expected_behavior, evidence, failure_mode } = data;
  const stmt = currentDb.prepare(
    `INSERT INTO triage_evals (id, run_id, task_input, expected_behavior, evidence, failure_mode, status, created_at)
     VALUES (:id, :run_id, :task_input, :expected_behavior, :evidence, :failure_mode, 'pending', :created_at)
     ON CONFLICT(id) DO NOTHING`,
  );
  stmt.run({
    ":id": id,
    ":run_id": runId,
    ":task_input": task_input ?? "",
    ":expected_behavior": expected_behavior ?? "",
    ":evidence": evidence ?? null,
    ":failure_mode": failure_mode ?? null,
    ":created_at": createdAt ?? new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Insert a normalized triage inbox item row (no-op when the id already exists).
 * Purpose: store triage inbox items (eval_candidate, implementation_task, etc.) in DB.
 * Consumer: commands-todo.js triageTodoDump.
 * @param {string} id - Row identifier (primary key).
 * @param {string} runId - Owning triage_runs id.
 * @param {string} kind - Item kind discriminator.
 * @param {string} content - Item body text.
 * @param {string|null|undefined} evidence - Supporting evidence; NULL when absent.
 * @param {string|null|undefined} createdAt - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageItem(id, runId, kind, content, evidence, createdAt) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO triage_items (id, run_id, kind, content, evidence, status, created_at)
     VALUES (:id, :run_id, :kind, :content, :evidence, 'pending', :created_at)
     ON CONFLICT(id) DO NOTHING`,
  );
  const params = {
    ":id": id,
    ":run_id": runId,
    ":kind": kind,
    ":content": content,
    ":evidence": evidence ?? null,
    ":created_at": createdAt ?? new Date().toISOString(),
  };
  stmt.run(params);
}
|
||||
|
||||
/**
 * Insert a triage skill proposal row (no-op when the id already exists).
 * Purpose: store skill proposals in DB instead of .skills.jsonl.
 * Consumer: commands-todo.js triageTodoDump.
 * @param {string} id - Row identifier (primary key).
 * @param {string} runId - Owning triage_runs id.
 * @param {object} data - Skill proposal; name falls back title→name, trigger falls
 *   back trigger_pattern→trigger, and the full object is kept as raw_json.
 * @param {string|null|undefined} createdAt - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageSkill(id, runId, data, createdAt) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO triage_skills (id, run_id, name, description, trigger, raw_json, status, created_at)
     VALUES (:id, :run_id, :name, :description, :trigger, :raw_json, 'pending', :created_at)
     ON CONFLICT(id) DO NOTHING`,
  );
  stmt.run({
    ":id": id,
    ":run_id": runId,
    ":name": data.title ?? data.name ?? null,
    ":description": data.description ?? null,
    ":trigger": data.trigger_pattern ?? data.trigger ?? null,
    // Preserve the full proposal payload for later inspection.
    ":raw_json": JSON.stringify(data),
    ":created_at": createdAt ?? new Date().toISOString(),
  });
}
|
||||
|
||||
// ─── Runtime Counters ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Get a runtime counter value by key. Returns 0 if the key does not exist
 * (or if no database is currently open).
 * Purpose: replace per-key JSON files in .sf/runtime/ with queryable DB rows.
 * Consumer: auto-dispatch.js rewrite-count and uat-count logic.
 * @param {string} key - Counter key.
 * @returns {number} Stored counter value, or 0 when missing/non-numeric.
 */
export function getRuntimeCounter(key) {
  if (!currentDb) return 0;
  const stmt = currentDb.prepare("SELECT value FROM runtime_counters WHERE key = ?");
  const row = stmt.get(key);
  if (typeof row?.value === "number") return row.value;
  return 0;
}
|
||||
|
||||
/**
 * Set a runtime counter to an explicit value (upsert on key).
 * Purpose: replace JSON file writes for named counters.
 * Consumer: auto-dispatch.js setRewriteCount.
 * @param {string} key - Counter key.
 * @param {number} value - New counter value.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setRuntimeCounter(key, value) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO runtime_counters (key, value, updated_at)
     VALUES (:key, :value, :updated_at)
     ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = excluded.updated_at`,
  );
  const params = {
    ":key": key,
    ":value": value,
    ":updated_at": new Date().toISOString(),
  };
  stmt.run(params);
}
|
||||
|
||||
/**
 * Increment a runtime counter by one and return the new value (the upsert
 * and read-back run on the same connection).
 * Purpose: replace the read-modify-write JSON file pattern for counters.
 * Consumer: auto-dispatch.js incrementUatCount.
 * @param {string} key - Counter key; created with value 1 when absent.
 * @returns {number} The counter value after the increment (1 on fallback).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function incrementRuntimeCounter(key) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const upsert = currentDb.prepare(
    `INSERT INTO runtime_counters (key, value, updated_at)
     VALUES (:key, 1, :updated_at)
     ON CONFLICT(key) DO UPDATE SET value = value + 1, updated_at = excluded.updated_at`,
  );
  upsert.run({ ":key": key, ":updated_at": new Date().toISOString() });
  const row = currentDb
    .prepare("SELECT value FROM runtime_counters WHERE key = ?")
    .get(key);
  if (typeof row?.value === "number") return row.value;
  return 1;
}
|
||||
|
||||
// ─── Validation Attention Markers ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Get a validation attention marker for a milestone, or null if absent
 * (also null when no database is currently open).
 * Purpose: replace .sf/runtime/validation-attention/{mid}.json reads.
 * Consumer: auto-dispatch.js hasActiveValidationAttentionMarker.
 * @param {string} milestoneId - Milestone identifier.
 * @returns {object|null} The marker row, or null when not found.
 */
export function getValidationAttentionMarker(milestoneId) {
  if (!currentDb) return null;
  const stmt = currentDb.prepare(
    "SELECT * FROM validation_attention_markers WHERE milestone_id = ?",
  );
  const row = stmt.get(milestoneId);
  return row ?? null;
}
|
||||
|
||||
/**
 * Upsert a validation attention marker for a milestone. On conflict the
 * original created_at is preserved (the DO UPDATE list omits it); the other
 * marker fields are overwritten.
 * Purpose: replace .sf/runtime/validation-attention/{mid}.json writes.
 * Consumer: auto-dispatch.js writeValidationAttentionMarker.
 * @param {string} milestoneId - Milestone identifier (primary key).
 * @param {object} marker - Marker fields (createdAt, source, remediationRound,
 *   revalidationRound, revalidationRequestedAt); missing fields become NULL.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertValidationAttentionMarker(milestoneId, marker) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  const stmt = currentDb.prepare(
    `INSERT INTO validation_attention_markers
       (milestone_id, created_at, source, remediation_round, revalidation_round, revalidation_requested_at)
     VALUES (:milestone_id, :created_at, :source, :remediation_round, :revalidation_round, :revalidation_requested_at)
     ON CONFLICT(milestone_id) DO UPDATE SET
       source = excluded.source,
       remediation_round = excluded.remediation_round,
       revalidation_round = excluded.revalidation_round,
       revalidation_requested_at = excluded.revalidation_requested_at`,
  );
  const params = {
    ":milestone_id": milestoneId,
    ":created_at": marker.createdAt ?? now,
    ":source": marker.source ?? null,
    ":remediation_round": marker.remediationRound ?? null,
    ":revalidation_round": marker.revalidationRound ?? null,
    ":revalidation_requested_at": marker.revalidationRequestedAt ?? null,
  };
  stmt.run(params);
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ import { emitJournalEvent, queryJournal } from "../journal.js";
|
|||
import { readJudgmentLog } from "../judgment-log.js";
|
||||
import { ModelLearner } from "../model-learner.js";
|
||||
import { createScheduleStore } from "../schedule/schedule-store.js";
|
||||
import { closeDatabase } from "../sf-db.js";
|
||||
import { closeDatabase, getDatabase } from "../sf-db.js";
|
||||
import { buildAuditEnvelope, emitUokAuditEvent } from "../uok/audit.js";
|
||||
import {
|
||||
parseParityEvents,
|
||||
|
|
@ -304,7 +304,7 @@ describe("SF JSONL schema versioning", () => {
|
|||
assert.equal(summary.reasons.quality, 1);
|
||||
});
|
||||
|
||||
test("todo_triage_jsonl_outputs_write_schema_versions", async () => {
|
||||
test("todo_triage_outputs_written_to_db", async () => {
|
||||
const project = makeProject();
|
||||
writeFileSync(join(project, "TODO.md"), "# TODO\n\nmake evals real\n");
|
||||
const response = JSON.stringify({
|
||||
|
|
@ -330,25 +330,28 @@ describe("SF JSONL schema versioning", () => {
|
|||
unclear_notes: [],
|
||||
});
|
||||
|
||||
const result = await triageTodoDump(project, async () => response, {
|
||||
await triageTodoDump(project, async () => response, {
|
||||
clear: false,
|
||||
backlog: true,
|
||||
date: new Date("2026-05-07T01:02:03.000Z"),
|
||||
});
|
||||
|
||||
const db = getDatabase();
|
||||
assert.ok(db, "database should be open after triage");
|
||||
const evalRows = db.prepare("SELECT * FROM triage_evals").all();
|
||||
assert.ok(evalRows.length >= 2, "should have eval rows in DB");
|
||||
const itemRows = db.prepare("SELECT * FROM triage_items").all();
|
||||
assert.ok(itemRows.length > 0, "should have item rows in DB");
|
||||
const runRows = db.prepare("SELECT * FROM triage_runs").all();
|
||||
assert.ok(runRows.length > 0, "should have triage run in DB");
|
||||
|
||||
// Backlog JSONL evidence file is still written (it is a human-readable artifact)
|
||||
const backlogDir = join(project, ".sf", "triage", "backlog");
|
||||
const [backlogFile] = readdirSync(backlogDir).filter((file) =>
|
||||
file.endsWith(".jsonl"),
|
||||
);
|
||||
|
||||
for (const path of [
|
||||
result.evalJsonlPath,
|
||||
result.normalizedJsonlPath,
|
||||
result.skillJsonlPath,
|
||||
join(backlogDir, backlogFile),
|
||||
]) {
|
||||
const rows = readJsonl(path);
|
||||
assert.ok(rows.length > 0, `${path} should contain rows`);
|
||||
for (const row of rows) assert.equal(row.schemaVersion, 1);
|
||||
}
|
||||
const backlogRows = readJsonl(join(backlogDir, backlogFile));
|
||||
assert.ok(backlogRows.length > 0, "backlog JSONL should contain rows");
|
||||
for (const row of backlogRows) assert.equal(row.schemaVersion, 1);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -223,7 +223,7 @@ test("openDatabase_migrates_v27_tasks_without_created_at_through_spec_backfill",
|
|||
const version = db
|
||||
.prepare("SELECT MAX(version) AS version FROM schema_version")
|
||||
.get();
|
||||
assert.equal(version.version, 51);
|
||||
assert.equal(version.version, 52);
|
||||
const taskSpec = db
|
||||
.prepare(
|
||||
"SELECT milestone_id, slice_id, task_id, verify FROM task_specs WHERE task_id = 'T01'",
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue