fix: store backlog items in db
This commit is contained in:
parent
59cfc4f7c3
commit
9ceb0bf229
5 changed files with 322 additions and 81 deletions
|
|
@ -2,18 +2,33 @@
|
|||
* SF Command — /sf backlog
|
||||
*
|
||||
* Structured backlog management with 999.x numbering.
|
||||
* Items stored in .sf/WORK-QUEUE.md as markdown checklist.
|
||||
* Items live in `.sf/sf.db`; `.sf/WORK-QUEUE.md` is a legacy import fallback.
|
||||
* Items can be promoted to active slices via add-slice.
|
||||
*/
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { dirname, join } from "node:path";
|
||||
import { sfRoot } from "./paths.js";
|
||||
import { existsSync, mkdirSync, readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import {
|
||||
addBacklogItem as addBacklogItemToDb,
|
||||
isDbAvailable,
|
||||
listBacklogItems,
|
||||
openDatabase,
|
||||
removeBacklogItem as removeBacklogItemFromDb,
|
||||
updateBacklogItemStatus,
|
||||
} from "./sf-db.js";
|
||||
|
||||
function backlogPath(basePath) {
|
||||
return join(sfRoot(basePath), "WORK-QUEUE.md");
|
||||
/**
 * Ensure the backlog database is open, creating `.sf/sf.db` on demand.
 *
 * @param {string} basePath - Project root containing the `.sf` directory.
 * @returns {boolean} true when a database is available for backlog operations.
 */
function ensureBacklogDb(basePath) {
  if (!isDbAvailable()) {
    const dbDir = join(basePath, ".sf");
    // Create the .sf directory first so openDatabase has somewhere to write.
    mkdirSync(dbDir, { recursive: true });
    return openDatabase(join(dbDir, "sf.db"));
  }
  return true;
}
|
||||
function parseBacklog(basePath) {
|
||||
const filePath = backlogPath(basePath);
|
||||
|
||||
/**
 * Path of the legacy markdown backlog (`.sf/WORK-QUEUE.md`) under a project root.
 * Kept only as an import fallback; the live backlog is in the DB.
 *
 * @param {string} basePath - Project root directory.
 * @returns {string} Absolute/relative path to the legacy work-queue file.
 */
function legacyBacklogPath(basePath) {
  const sfDir = join(basePath, ".sf");
  return join(sfDir, "WORK-QUEUE.md");
}
|
||||
|
||||
function parseLegacyBacklog(basePath) {
|
||||
const filePath = legacyBacklogPath(basePath);
|
||||
if (!existsSync(filePath)) return [];
|
||||
const content = readFileSync(filePath, "utf-8");
|
||||
const items = [];
|
||||
|
|
@ -25,38 +40,38 @@ function parseBacklog(basePath) {
|
|||
items.push({
|
||||
id: match[2],
|
||||
title: match[3].trim(),
|
||||
done: match[1] === "x",
|
||||
status: match[1] === "x" ? "promoted" : "pending",
|
||||
note: match[4] ?? "",
|
||||
});
|
||||
}
|
||||
}
|
||||
return items;
|
||||
}
|
||||
function writeBacklog(basePath, items) {
|
||||
const filePath = backlogPath(basePath);
|
||||
mkdirSync(dirname(filePath), { recursive: true });
|
||||
const lines = ["# Backlog\n"];
|
||||
for (const item of items) {
|
||||
const check = item.done ? "x" : " ";
|
||||
const note = item.note ? ` (${item.note})` : "";
|
||||
lines.push(`- [${check}] ${item.id} — ${item.title}${note}`);
|
||||
|
||||
/**
 * One-time import of legacy WORK-QUEUE.md items into the backlog DB.
 *
 * Runs only when the DB backlog is empty; otherwise it is a no-op, which
 * keeps repeated calls idempotent.
 *
 * @param {string} basePath - Project root containing `.sf/WORK-QUEUE.md`.
 * @returns {number} Count of items imported (0 when nothing was imported).
 */
function importLegacyBacklogIfNeeded(basePath) {
  // DB already has items — the legacy file was either imported before or
  // never existed; do not import again.
  if (listBacklogItems().length > 0) return 0;
  let imported = 0;
  for (const item of parseLegacyBacklog(basePath)) {
    addBacklogItemToDb({
      id: item.id,
      title: item.title,
      status: item.status,
      note: item.note,
      // Tag provenance so imported rows are distinguishable from manual adds.
      source: "legacy-work-queue",
    });
    imported += 1;
  }
  return imported;
}
|
||||
function nextBacklogId(items) {
|
||||
let maxNum = 0;
|
||||
for (const item of items) {
|
||||
const match = item.id.match(/^999\.(\d+)$/);
|
||||
if (match) {
|
||||
const num = parseInt(match[1], 10);
|
||||
if (num > maxNum) maxNum = num;
|
||||
}
|
||||
}
|
||||
return `999.${maxNum + 1}`;
|
||||
|
||||
/**
 * Return the current backlog items, preferring the DB.
 *
 * Opens the DB if needed and performs the one-time legacy import first.
 * When the DB cannot be opened at all, falls back to reading the legacy
 * markdown file directly (read-only view).
 *
 * @param {string} basePath - Project root directory.
 * @returns {Array<object>} Backlog items.
 */
function currentItems(basePath) {
  const dbReady = ensureBacklogDb(basePath);
  if (dbReady) {
    importLegacyBacklogIfNeeded(basePath);
    return listBacklogItems();
  }
  // DB unavailable — best-effort fallback to the legacy markdown file.
  return parseLegacyBacklog(basePath);
}
|
||||
|
||||
async function listBacklog(basePath, ctx) {
|
||||
const items = parseBacklog(basePath);
|
||||
const items = currentItems(basePath);
|
||||
if (items.length === 0) {
|
||||
ctx.ui.notify(
|
||||
"Backlog is empty. Add items with /sf backlog add <title>",
|
||||
|
|
@ -66,31 +81,37 @@ async function listBacklog(basePath, ctx) {
|
|||
}
|
||||
const lines = ["Backlog:\n"];
|
||||
for (const item of items) {
|
||||
const status = item.done ? "✓" : "○";
|
||||
const done = item.status === "promoted" || item.status === "done";
|
||||
const status = done ? "✓" : "○";
|
||||
const note = item.note ? ` (${item.note})` : "";
|
||||
lines.push(` ${status} ${item.id} — ${item.title}${note}`);
|
||||
}
|
||||
const pending = items.filter((i) => !i.done).length;
|
||||
const pending = items.filter((i) => i.status === "pending").length;
|
||||
lines.push(`\n${pending} pending, ${items.length - pending} promoted/done`);
|
||||
ctx.ui.notify(lines.join("\n"), "info");
|
||||
}
|
||||
|
||||
/**
 * `/sf backlog add <title>` — persist a new backlog item in the DB.
 *
 * @param {string} basePath - Project root directory.
 * @param {string} title - Raw title text from the command line.
 * @param {object} ctx - Command context providing `ctx.ui.notify`.
 */
async function addBacklogItem(basePath, title, ctx) {
  if (!title) {
    ctx.ui.notify("Usage: /sf backlog add <title>", "warning");
    return;
  }
  if (!ensureBacklogDb(basePath)) {
    ctx.ui.notify("Backlog DB is unavailable; cannot add item.", "warning");
    return;
  }
  // One-time migration of any legacy WORK-QUEUE.md items before adding.
  importLegacyBacklogIfNeeded(basePath);
  // Strip a leading and/or trailing quote character the shell may have left.
  const cleanTitle = title.replace(/^['"]|['"]$/g, "");
  // Date-only stamp (YYYY-MM-DD) recorded in the item's note.
  const date = new Date().toISOString().slice(0, 10);
  // The DB assigns the next 999.x id and returns it.
  const id = addBacklogItemToDb({
    title: cleanTitle,
    status: "pending",
    note: `added ${date}`,
    source: "manual",
  });
  ctx.ui.notify(`Added ${id}: "${cleanTitle}"`, "success");
}
|
||||
|
||||
async function promoteBacklogItem(basePath, itemId, ctx, _pi) {
|
||||
if (!itemId) {
|
||||
ctx.ui.notify(
|
||||
|
|
@ -99,41 +120,50 @@ async function promoteBacklogItem(basePath, itemId, ctx, _pi) {
|
|||
);
|
||||
return;
|
||||
}
|
||||
const items = parseBacklog(basePath);
|
||||
const item = items.find((i) => i.id === itemId);
|
||||
if (!ensureBacklogDb(basePath)) {
|
||||
ctx.ui.notify("Backlog DB is unavailable; cannot promote item.", "warning");
|
||||
return;
|
||||
}
|
||||
importLegacyBacklogIfNeeded(basePath);
|
||||
const item = listBacklogItems().find((i) => i.id === itemId);
|
||||
if (!item) {
|
||||
ctx.ui.notify(`Backlog item ${itemId} not found.`, "warning");
|
||||
return;
|
||||
}
|
||||
if (item.done) {
|
||||
if (item.status === "promoted" || item.status === "done") {
|
||||
ctx.ui.notify(`${itemId} is already promoted/done.`, "info");
|
||||
return;
|
||||
}
|
||||
// Promote — currently requires single-writer engine (not yet available)
|
||||
// Mark as promoted in backlog for now; slice creation will be available with the engine.
|
||||
item.done = true;
|
||||
item.note = `promoted ${new Date().toISOString().slice(0, 10)}`;
|
||||
writeBacklog(basePath, items);
|
||||
updateBacklogItemStatus(
|
||||
itemId,
|
||||
"promoted",
|
||||
`promoted ${new Date().toISOString().slice(0, 10)}`,
|
||||
);
|
||||
ctx.ui.notify(
|
||||
`Promoted ${itemId}: "${item.title}" — add it to the roadmap manually or wait for engine slice commands.`,
|
||||
`Promoted ${itemId}: "${item.title}" — add it to the roadmap manually or use the slice planning tools.`,
|
||||
"info",
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * `/sf backlog remove <id>` — delete a backlog item from the DB.
 *
 * @param {string} basePath - Project root directory.
 * @param {string} itemId - Backlog item id (e.g. "999.3").
 * @param {object} ctx - Command context providing `ctx.ui.notify`.
 */
async function removeBacklogItem(basePath, itemId, ctx) {
  if (!itemId) {
    ctx.ui.notify("Usage: /sf backlog remove <id>", "warning");
    return;
  }
  if (!ensureBacklogDb(basePath)) {
    ctx.ui.notify("Backlog DB is unavailable; cannot remove item.", "warning");
    return;
  }
  // Import legacy items first so a legacy id can be removed too.
  importLegacyBacklogIfNeeded(basePath);
  // Look the item up before deleting so the confirmation can echo its title.
  const item = listBacklogItems().find((i) => i.id === itemId);
  if (!item) {
    ctx.ui.notify(`Backlog item ${itemId} not found.`, "warning");
    return;
  }
  removeBacklogItemFromDb(itemId);
  ctx.ui.notify(`Removed ${item.id}: "${item.title}"`, "success");
}
|
||||
|
||||
export async function handleBacklog(args, ctx, pi) {
|
||||
const basePath = process.cwd();
|
||||
const parts = args.trim().split(/\s+/);
|
||||
|
|
@ -149,7 +179,6 @@ export async function handleBacklog(args, ctx, pi) {
|
|||
case "remove":
|
||||
return removeBacklogItem(basePath, rest.trim(), ctx);
|
||||
default:
|
||||
// Treat as implicit add
|
||||
return addBacklogItem(basePath, args, ctx);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ import {
|
|||
import { dirname, join } from "node:path";
|
||||
import { projectRoot } from "./commands/context.js";
|
||||
import { sfRoot } from "./paths.js";
|
||||
import { addBacklogItem, isDbAvailable, openDatabase } from "./sf-db.js";
|
||||
|
||||
const _EMPTY_TODO = "# TODO\n\nDump anything here.\n";
|
||||
const MAX_DUMP_CHARS = 48_000;
|
||||
|
|
@ -276,17 +277,6 @@ function renderSkillProposals(result) {
|
|||
.join("\n") + "\n"
|
||||
);
|
||||
}
|
||||
function backlogPath(basePath) {
|
||||
return join(sfRoot(basePath), "WORK-QUEUE.md");
|
||||
}
|
||||
function nextBacklogId(content) {
|
||||
let maxNum = 0;
|
||||
for (const match of content.matchAll(/^- \[[ x]\] 999\.(\d+) — /gm)) {
|
||||
const num = Number.parseInt(match[1], 10);
|
||||
if (Number.isFinite(num) && num > maxNum) maxNum = num;
|
||||
}
|
||||
return `999.${maxNum + 1}`;
|
||||
}
|
||||
function renderBacklogJsonl(items, triagedAt) {
|
||||
return (
|
||||
items
|
||||
|
|
@ -308,22 +298,26 @@ function renderBacklogJsonl(items, triagedAt) {
|
|||
function appendBacklogItems(basePath, titles, triageRunId) {
|
||||
const cleanTitles = titles.map((title) => title.trim()).filter(Boolean);
|
||||
if (cleanTitles.length === 0) return 0;
|
||||
const filePath = backlogPath(basePath);
|
||||
mkdirSync(dirname(filePath), { recursive: true });
|
||||
let content = existsSync(filePath)
|
||||
? readFileSync(filePath, "utf-8")
|
||||
: "# Backlog\n\n";
|
||||
if (!content.endsWith("\n")) content += "\n";
|
||||
const date = new Date().toISOString().slice(0, 10);
|
||||
const triagedAt = new Date().toISOString();
|
||||
const backlogItems = [];
|
||||
for (const title of cleanTitles) {
|
||||
const id = nextBacklogId(content);
|
||||
content += `- [ ] ${id} — ${title.replace(/^['"]|['"]$/g, "")} (triaged ${date})\n`;
|
||||
backlogItems.push({ id, title: title.replace(/^['"]|['"]$/g, "") });
|
||||
if (!isDbAvailable()) {
|
||||
const root = sfRoot(basePath);
|
||||
mkdirSync(root, { recursive: true });
|
||||
openDatabase(join(root, "sf.db"));
|
||||
}
|
||||
writeFileSync(filePath, content, "utf-8");
|
||||
// Also write JSONL backlog entries
|
||||
for (const title of cleanTitles) {
|
||||
const cleanTitle = title.replace(/^['"]|['"]$/g, "");
|
||||
const id = addBacklogItem({
|
||||
title: cleanTitle,
|
||||
status: "pending",
|
||||
note: `triaged ${date}`,
|
||||
source: "todo-triage",
|
||||
triageRunId,
|
||||
});
|
||||
backlogItems.push({ id, title: cleanTitle });
|
||||
}
|
||||
// Also write versioned JSONL triage evidence; the executable backlog lives in DB.
|
||||
const backlogDir = join(basePath, ".sf", "triage", "backlog");
|
||||
mkdirSync(backlogDir, { recursive: true });
|
||||
const jsonlPath = join(backlogDir, `${triageRunId}.jsonl`);
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ function openRawDb(path) {
|
|||
loadProvider();
|
||||
return new DatabaseSync(path);
|
||||
}
|
||||
const SCHEMA_VERSION = 34;
|
||||
const SCHEMA_VERSION = 35;
|
||||
function indexExists(db, name) {
|
||||
return !!db
|
||||
.prepare(
|
||||
|
|
@ -140,6 +140,25 @@ function ensureRepoProfileTables(db) {
|
|||
"CREATE INDEX IF NOT EXISTS idx_repo_file_observations_status ON repo_file_observations(git_status, ownership)",
|
||||
);
|
||||
}
|
||||
/**
 * Create the backlog_items table and its index if they do not exist.
 *
 * Idempotent (IF NOT EXISTS): safe to call from both initSchema and the
 * v35 migration step.
 *
 * @param {object} db - Open database handle (exec-capable).
 */
function ensureBacklogTables(db) {
  db.exec(`
    CREATE TABLE IF NOT EXISTS backlog_items (
      id TEXT PRIMARY KEY,
      title TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'pending',
      note TEXT NOT NULL DEFAULT '',
      source TEXT NOT NULL DEFAULT '',
      triage_run_id TEXT DEFAULT NULL,
      sequence INTEGER NOT NULL DEFAULT 0,
      created_at TEXT NOT NULL,
      updated_at TEXT NOT NULL,
      promoted_at TEXT DEFAULT NULL
    )
  `);
  // Index supports the listBacklogItems ordering (status/sequence scans).
  db.exec(
    "CREATE INDEX IF NOT EXISTS idx_backlog_items_status_sequence ON backlog_items(status, sequence, id)",
  );
}
|
||||
function ensureSolverEvalTables(db) {
|
||||
db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS solver_eval_runs (
|
||||
|
|
@ -863,6 +882,7 @@ function initSchema(db, fileBacked) {
|
|||
"CREATE INDEX IF NOT EXISTS idx_self_feedback_kind ON self_feedback(kind, ts)",
|
||||
);
|
||||
ensureRepoProfileTables(db);
|
||||
ensureBacklogTables(db);
|
||||
ensureSolverEvalTables(db);
|
||||
ensureHeadlessRunTables(db);
|
||||
ensureUokMessageTables(db);
|
||||
|
|
@ -1925,6 +1945,15 @@ function migrateSchema(db) {
|
|||
":applied_at": new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
if (currentVersion < 35) {
|
||||
ensureBacklogTables(db);
|
||||
db.prepare(
|
||||
"INSERT INTO schema_version (version, applied_at) VALUES (:version, :applied_at)",
|
||||
).run({
|
||||
":version": 35,
|
||||
":applied_at": new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
db.exec("COMMIT");
|
||||
} catch (err) {
|
||||
db.exec("ROLLBACK");
|
||||
|
|
@ -3301,6 +3330,108 @@ export function getMilestone(id) {
|
|||
if (!row) return null;
|
||||
return rowToMilestone(row);
|
||||
}
|
||||
/**
 * Map a raw backlog_items row (snake_case columns) to the camelCase
 * item shape used by callers, normalizing nullable columns.
 *
 * @param {object} row - Row as returned by the sqlite driver.
 * @returns {object} Backlog item.
 */
function rowToBacklogItem(row) {
  const { id, title, status } = row;
  return {
    id,
    title,
    status,
    // Nullable text columns normalize to empty string.
    note: row["note"] ?? "",
    source: row["source"] ?? "",
    triageRunId: row["triage_run_id"] ?? null,
    // Missing sequence means "unordered"; normalize to 0.
    sequence: row["sequence"] ?? 0,
    createdAt: row["created_at"],
    updatedAt: row["updated_at"],
    promotedAt: row["promoted_at"] ?? null,
  };
}
|
||||
/**
 * List all backlog items, explicitly-sequenced rows first (ascending
 * sequence), then unsequenced rows, with id as the tie-breaker.
 *
 * @returns {Array<object>} Items, or [] when no database is open.
 */
export function listBacklogItems() {
  const db = currentDb;
  if (!db) return [];
  const rows = db
    .prepare(
      "SELECT * FROM backlog_items ORDER BY CASE WHEN sequence > 0 THEN 0 ELSE 1 END, sequence, id",
    )
    .all();
  return rows.map(rowToBacklogItem);
}
|
||||
/**
 * Compute the next backlog id in the 999.x series.
 *
 * Finds the numerically highest existing '999.N' id and returns '999.(N+1)';
 * falls back to '999.1' when the table is empty or the suffix is unparseable.
 *
 * @returns {string} Next backlog id.
 * @throws {SFError} When no database is open.
 */
export function nextBacklogItemId() {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const topRow = currentDb
    .prepare(
      "SELECT id FROM backlog_items WHERE id LIKE '999.%' ORDER BY CAST(substr(id, 5) AS INTEGER) DESC LIMIT 1",
    )
    .get();
  let candidate = 1;
  if (topRow?.id) {
    // slice(4) drops the '999.' prefix, leaving the numeric suffix.
    candidate = Number.parseInt(String(topRow.id).slice(4), 10) + 1;
  }
  if (!Number.isFinite(candidate)) candidate = 1;
  return `999.${candidate}`;
}
|
||||
/**
 * Insert a backlog item, or upsert it when an explicit id already exists.
 *
 * @param {object} params
 * @param {string} [params.id] - Explicit id (legacy import/upsert); when
 *   omitted the next 999.x id is minted via nextBacklogItemId().
 * @param {string} params.title - Item title.
 * @param {string} [params.note=""] - Free-form note.
 * @param {string} [params.source="manual"] - Provenance tag.
 * @param {string|null} [params.triageRunId=null] - Originating triage run.
 * @param {string} [params.status="pending"] - Initial status.
 * @returns {string} The id of the inserted/updated item.
 * @throws {SFError} When no database is open.
 */
export function addBacklogItem({
  id,
  title,
  note = "",
  source = "manual",
  triageRunId = null,
  status = "pending",
}) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const itemId = id ?? nextBacklogItemId();
  const now = new Date().toISOString();
  // Append to the end of the manual ordering: one past the current max.
  const sequenceRow = currentDb
    .prepare(
      "SELECT COALESCE(MAX(sequence), 0) + 1 AS sequence FROM backlog_items",
    )
    .get();
  // On id conflict this updates content fields but deliberately keeps the
  // original sequence and created_at of the existing row.
  currentDb
    .prepare(`INSERT INTO backlog_items (
      id, title, status, note, source, triage_run_id, sequence, created_at, updated_at, promoted_at
    ) VALUES (
      :id, :title, :status, :note, :source, :triage_run_id, :sequence, :created_at, :updated_at, :promoted_at
    )
    ON CONFLICT(id) DO UPDATE SET
      title = excluded.title,
      status = excluded.status,
      note = excluded.note,
      source = excluded.source,
      triage_run_id = excluded.triage_run_id,
      updated_at = excluded.updated_at,
      promoted_at = excluded.promoted_at`)
    .run({
      ":id": itemId,
      ":title": title,
      ":status": status,
      ":note": note,
      ":source": source,
      ":triage_run_id": triageRunId,
      ":sequence": sequenceRow?.sequence ?? 1,
      ":created_at": now,
      ":updated_at": now,
      // promoted_at is stamped only when created directly as 'promoted'.
      ":promoted_at": status === "promoted" ? now : null,
    });
  return itemId;
}
|
||||
/**
 * Update a backlog item's status and note, stamping updated_at.
 *
 * promoted_at is set only on a transition to 'promoted'; for any other
 * status the existing promoted_at value is preserved.
 *
 * @param {string} id - Backlog item id.
 * @param {string} status - New status ('pending' | 'promoted' | 'done' ...).
 * @param {string} [note=""] - Replacement note text (overwrites the old note).
 * @returns {boolean} true when a row with the given id was updated.
 * @throws {SFError} When no database is open.
 */
export function updateBacklogItemStatus(id, status, note = "") {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  const result = currentDb
    .prepare(`UPDATE backlog_items
      SET status = :status,
          note = :note,
          updated_at = :updated_at,
          promoted_at = CASE WHEN :status = 'promoted' THEN :updated_at ELSE promoted_at END
      WHERE id = :id`)
    .run({
      ":id": id,
      ":status": status,
      ":note": note,
      ":updated_at": now,
    });
  return (result?.changes ?? 0) > 0;
}
|
||||
/**
 * Delete a backlog item by id.
 *
 * @param {string} id - Backlog item id.
 * @returns {boolean} true when a row was actually deleted.
 * @throws {SFError} When no database is open.
 */
export function removeBacklogItem(id) {
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const outcome = currentDb
    .prepare("DELETE FROM backlog_items WHERE id = :id")
    .run({ ":id": id });
  const deleted = outcome?.changes ?? 0;
  return deleted > 0;
}
|
||||
/**
|
||||
* Update a milestone's status in the database.
|
||||
* Used by park/unpark to keep the DB in sync with the filesystem marker.
|
||||
|
|
|
|||
87
src/resources/extensions/sf/tests/backlog-db.test.mjs
Normal file
87
src/resources/extensions/sf/tests/backlog-db.test.mjs
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
import assert from "node:assert/strict";
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
mkdtempSync,
|
||||
rmSync,
|
||||
writeFileSync,
|
||||
} from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, test } from "vitest";
|
||||
import { handleBacklog } from "../commands-backlog.js";
|
||||
import { closeDatabase, listBacklogItems, openDatabase } from "../sf-db.js";
|
||||
|
||||
const tmpRoots = [];
|
||||
|
||||
afterEach(() => {
|
||||
closeDatabase();
|
||||
for (const root of tmpRoots.splice(0)) {
|
||||
rmSync(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * Create a unique temp project directory and register it for cleanup.
 *
 * @returns {string} Path of the new temp directory.
 */
function makeProject() {
  const projectDir = mkdtempSync(join(tmpdir(), "sf-backlog-db-"));
  // Track it so afterEach can remove it.
  tmpRoots.push(projectDir);
  return projectDir;
}
|
||||
|
||||
/**
 * Build a minimal command context whose ui.notify records every call
 * into the supplied array as `{ message, level }`.
 *
 * @param {Array<object>} messages - Sink for recorded notifications.
 * @returns {object} Context with a `ui.notify(message, level)` method.
 */
function makeCtx(messages) {
  const notify = (message, level) => {
    messages.push({ message, level });
  };
  return { ui: { notify } };
}
|
||||
|
||||
// Adding via the command with an open DB must persist to the DB only —
// no legacy WORK-QUEUE.md file may be created as a side effect.
test("backlog_add_when_db_available_persists_without_work_queue_markdown", async () => {
  const project = makeProject();
  mkdirSync(join(project, ".sf"), { recursive: true });
  openDatabase(join(project, ".sf", "sf.db"));
  const previousCwd = process.cwd();
  const messages = [];
  try {
    // handleBacklog resolves the project from process.cwd().
    process.chdir(project);
    await handleBacklog("add Fix dispatch priority", makeCtx(messages), null);
  } finally {
    process.chdir(previousCwd);
  }

  const items = listBacklogItems();
  assert.equal(items.length, 1);
  // First item in an empty DB gets the first 999.x id.
  assert.equal(items[0].id, "999.1");
  assert.equal(items[0].title, "Fix dispatch priority");
  assert.equal(items[0].status, "pending");
  // The legacy markdown file must NOT be written anymore.
  assert.equal(existsSync(join(project, ".sf", "WORK-QUEUE.md")), false);
  assert.equal(messages.at(-1).level, "success");
});
|
||||
|
||||
// A pre-existing legacy WORK-QUEUE.md must be imported into the DB exactly
// once, even when the listing command runs multiple times.
test("backlog_list_when_legacy_work_queue_exists_imports_once_to_db", async () => {
  const project = makeProject();
  const sfDir = join(project, ".sf");
  mkdirSync(sfDir, { recursive: true });
  // Seed a legacy markdown backlog with a single pending item (999.7).
  writeFileSync(
    join(sfDir, "WORK-QUEUE.md"),
    "# Backlog\n\n- [ ] 999.7 — Legacy item (added 2026-05-07)\n",
    "utf-8",
  );
  openDatabase(join(project, ".sf", "sf.db"));
  const previousCwd = process.cwd();
  const messages = [];
  try {
    process.chdir(project);
    // Listing twice verifies the import is idempotent (no duplicates).
    await handleBacklog("", makeCtx(messages), null);
    await handleBacklog("", makeCtx(messages), null);
  } finally {
    process.chdir(previousCwd);
  }

  const items = listBacklogItems();
  assert.equal(items.length, 1);
  assert.equal(items[0].id, "999.7");
  // Imported rows are tagged with their provenance.
  assert.equal(items[0].source, "legacy-work-queue");
  assert.match(messages.at(-1).message, /999\.7/);
});
|
||||
|
|
@ -142,7 +142,7 @@ test("openDatabase_migrates_v27_tasks_without_created_at_through_spec_backfill",
|
|||
const version = db
|
||||
.prepare("SELECT MAX(version) AS version FROM schema_version")
|
||||
.get();
|
||||
assert.equal(version.version, 34);
|
||||
assert.equal(version.version, 35);
|
||||
const taskSpec = db
|
||||
.prepare(
|
||||
"SELECT milestone_id, slice_id, task_id, verify FROM task_specs WHERE task_id = 'T01'",
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue