refactor(sf-ext): split sf-db.js (9073 lines) into 18 domain modules
sf-db.js is now a pure barrel re-export. All logic lives in sf-db/: - sf-db-core.js — adapter, schema, transactions, shared helpers - sf-db-mode-state.js — Ask/Build/YOLO mode state - sf-db-decisions.js — ADR / decision records - sf-db-artifacts.js — file artifacts and attachments - sf-db-milestones.js — milestone CRUD - sf-db-slices.js — slice CRUD - sf-db-tasks.js — task CRUD - sf-db-worktree.js — worktree state - sf-db-evidence.js — retrieval evidence - sf-db-spec.js — spec/contract records - sf-db-gates.js — UOK gate records - sf-db-uok.js — unit-of-knowledge state - sf-db-session-store.js — session store / FTS - sf-db-backlog.js — backlog items - sf-db-learning.js — model learning / performance - sf-db-memory.js — memory / embeddings - sf-db-profile.js — user profile - sf-db-self-feedback.js — self-feedback triage sf-db/index.js re-exports sf-db.js for backward compat. All 4375 tests pass. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
parent
756355abf1
commit
90dc3c6798
19 changed files with 8830 additions and 9072 deletions
File diff suppressed because it is too large
Load diff
49
src/resources/extensions/sf/sf-db/sf-db-artifacts.js
Normal file
49
src/resources/extensions/sf/sf-db/sf-db-artifacts.js
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import { _getAdapter, rowToArtifact } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Best-effort wipe of the artifacts table.
 * No-op when no database is open; failures are logged, never thrown.
 */
export function clearArtifacts() {
  const db = _getAdapter();
  if (!db) return;
  try {
    db.exec("DELETE FROM artifacts");
  } catch (e) {
    logWarning("db", `clearArtifacts failed: ${e.message}`);
  }
}
/**
 * Upserts one artifact row keyed by path; imported_at is stamped with now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertArtifact(a) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT OR REPLACE INTO artifacts (path, artifact_type, milestone_id, slice_id, task_id, full_content, imported_at)
     VALUES (:path, :artifact_type, :milestone_id, :slice_id, :task_id, :full_content, :imported_at)`,
  );
  stmt.run({
    ":path": a.path,
    ":artifact_type": a.artifact_type,
    ":milestone_id": a.milestone_id,
    ":slice_id": a.slice_id,
    ":task_id": a.task_id,
    ":full_content": a.full_content,
    ":imported_at": new Date().toISOString(),
  });
}
|
||||
/**
 * Looks up one artifact by path.
 * @returns mapped artifact object, or null when absent or DB closed.
 */
export function getArtifact(path) {
  const db = _getAdapter();
  if (!db) return null;
  const row = db
    .prepare("SELECT * FROM artifacts WHERE path = :path")
    .get({ ":path": path });
  return row ? rowToArtifact(row) : null;
}
/**
 * Removes the artifact row for the given path (no-op if it doesn't exist).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteArtifactByPath(path) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const del = db.prepare("DELETE FROM artifacts WHERE path = :path");
  del.run({ ":path": path });
}
|
||||
101
src/resources/extensions/sf/sf-db/sf-db-backlog.js
Normal file
101
src/resources/extensions/sf/sf-db/sf-db-backlog.js
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
import { _getAdapter, rowToBacklogItem } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
|
||||
/**
 * Lists all backlog items, sequenced items first (by sequence), then
 * unsequenced ones by id. Returns [] when no database is open.
 */
export function listBacklogItems() {
  const db = _getAdapter();
  if (!db) return [];
  const rows = db
    .prepare(
      "SELECT * FROM backlog_items ORDER BY CASE WHEN sequence > 0 THEN 0 ELSE 1 END, sequence, id",
    )
    .all();
  return rows.map(rowToBacklogItem);
}
||||
/**
 * Allocates the next '999.N' backlog id by scanning the highest numeric
 * suffix currently stored; starts at '999.1' when none exist.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function nextBacklogItemId() {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const sql =
    "SELECT id FROM backlog_items WHERE id LIKE '999.%' ORDER BY CAST(substr(id, 5) AS INTEGER) DESC LIMIT 1";
  const latest = db.prepare(sql).get();
  let candidate = 1;
  if (latest?.id) {
    candidate = Number.parseInt(String(latest.id).slice(4), 10) + 1;
  }
  // Guard against a malformed stored id producing NaN.
  return `999.${Number.isFinite(candidate) ? candidate : 1}`;
}
||||
/**
 * Inserts or updates a backlog item. A missing id is auto-allocated via
 * nextBacklogItemId(); sequence and created_at are preserved on conflict.
 * promoted_at is stamped only when status is "promoted".
 * @returns the item id used.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function addBacklogItem({
  id,
  title,
  note = "",
  source = "manual",
  triageRunId = null,
  status = "pending",
}) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const itemId = id ?? nextBacklogItemId();
  const now = new Date().toISOString();
  const seqRow = db
    .prepare(
      "SELECT COALESCE(MAX(sequence), 0) + 1 AS sequence FROM backlog_items",
    )
    .get();
  const upsert = db.prepare(`INSERT INTO backlog_items (
  id, title, status, note, source, triage_run_id, sequence, created_at, updated_at, promoted_at
) VALUES (
  :id, :title, :status, :note, :source, :triage_run_id, :sequence, :created_at, :updated_at, :promoted_at
)
ON CONFLICT(id) DO UPDATE SET
  title = excluded.title,
  status = excluded.status,
  note = excluded.note,
  source = excluded.source,
  triage_run_id = excluded.triage_run_id,
  updated_at = excluded.updated_at,
  promoted_at = excluded.promoted_at`);
  upsert.run({
    ":id": itemId,
    ":title": title,
    ":status": status,
    ":note": note,
    ":source": source,
    ":triage_run_id": triageRunId,
    ":sequence": seqRow?.sequence ?? 1,
    ":created_at": now,
    ":updated_at": now,
    ":promoted_at": status === "promoted" ? now : null,
  });
  return itemId;
}
|
||||
/**
 * Updates a backlog item's status/note; promoted_at is set only on
 * transition to 'promoted' and otherwise left untouched.
 * @returns true when a row was actually updated.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateBacklogItemStatus(id, status, note = "") {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  const stmt = db.prepare(`UPDATE backlog_items
   SET status = :status,
       note = :note,
       updated_at = :updated_at,
       promoted_at = CASE WHEN :status = 'promoted' THEN :updated_at ELSE promoted_at END
   WHERE id = :id`);
  const result = stmt.run({
    ":id": id,
    ":status": status,
    ":note": note,
    ":updated_at": now,
  });
  return (result?.changes ?? 0) > 0;
}
||||
/**
 * Deletes one backlog item.
 * @returns true when a row was removed.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function removeBacklogItem(id) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const outcome = db
    .prepare("DELETE FROM backlog_items WHERE id = :id")
    .run({ ":id": id });
  return (outcome?.changes ?? 0) > 0;
}
|
||||
4140
src/resources/extensions/sf/sf-db/sf-db-core.js
Normal file
4140
src/resources/extensions/sf/sf-db/sf-db-core.js
Normal file
File diff suppressed because it is too large
Load diff
191
src/resources/extensions/sf/sf-db/sf-db-decisions.js
Normal file
191
src/resources/extensions/sf/sf-db/sf-db-decisions.js
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
import { _getAdapter } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
|
||||
/**
 * Inserts one decision (ADR) record; made_by defaults to "agent".
 * Plain INSERT — duplicate ids will raise; use upsertDecision to update.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertDecision(d) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT INTO decisions (id, when_context, scope, decision, choice, rationale, revisable, made_by, superseded_by)
     VALUES (:id, :when_context, :scope, :decision, :choice, :rationale, :revisable, :made_by, :superseded_by)`,
  );
  stmt.run({
    ":id": d.id,
    ":when_context": d.when_context,
    ":scope": d.scope,
    ":decision": d.decision,
    ":choice": d.choice,
    ":rationale": d.rationale,
    ":revisable": d.revisable,
    ":made_by": d.made_by ?? "agent",
    ":superseded_by": d.superseded_by,
  });
}
|
||||
/**
 * Fetches one decision by id, mapped to a plain record.
 * made_by falls back to "agent", superseded_by to null.
 * @returns the decision record, or null when absent or DB closed.
 */
export function getDecisionById(id) {
  const db = _getAdapter();
  if (!db) return null;
  const row = db.prepare("SELECT * FROM decisions WHERE id = ?").get(id);
  if (!row) return null;
  return {
    seq: row.seq,
    id: row.id,
    when_context: row.when_context,
    scope: row.scope,
    decision: row.decision,
    choice: row.choice,
    rationale: row.rationale,
    revisable: row.revisable,
    made_by: row.made_by ?? "agent",
    superseded_by: row.superseded_by ?? null,
  };
}
|
||||
/**
 * Lists all rows of the active_decisions view as plain records.
 * superseded_by is always null here — the view excludes superseded rows.
 * Returns [] when no database is open.
 */
export function getActiveDecisions() {
  const db = _getAdapter();
  if (!db) return [];
  const mapRow = (row) => ({
    seq: row.seq,
    id: row.id,
    when_context: row.when_context,
    scope: row.scope,
    decision: row.decision,
    choice: row.choice,
    rationale: row.rationale,
    revisable: row.revisable,
    made_by: row.made_by ?? "agent",
    superseded_by: null,
  });
  return db.prepare("SELECT * FROM active_decisions").all().map(mapRow);
}
|
||||
// Inserts or updates one decision record keyed by id.
// made_by defaults to "agent"; superseded_by defaults to null.
// Throws SFError(SF_STALE_STATE) when no database is open.
export function upsertDecision(d) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Use ON CONFLICT DO UPDATE instead of INSERT OR REPLACE to preserve the
  // seq column. INSERT OR REPLACE deletes then reinserts, resetting seq and
  // corrupting decision ordering in DECISIONS.md after reconcile replay.
  currentDb
    .prepare(`INSERT INTO decisions (id, when_context, scope, decision, choice, rationale, revisable, made_by, superseded_by)
      VALUES (:id, :when_context, :scope, :decision, :choice, :rationale, :revisable, :made_by, :superseded_by)
      ON CONFLICT(id) DO UPDATE SET
        when_context = excluded.when_context,
        scope = excluded.scope,
        decision = excluded.decision,
        choice = excluded.choice,
        rationale = excluded.rationale,
        revisable = excluded.revisable,
        made_by = excluded.made_by,
        superseded_by = excluded.superseded_by`)
    .run({
      ":id": d.id,
      ":when_context": d.when_context,
      ":scope": d.scope,
      ":decision": d.decision,
      ":choice": d.choice,
      ":rationale": d.rationale,
      ":revisable": d.revisable,
      ":made_by": d.made_by ?? "agent",
      ":superseded_by": d.superseded_by ?? null,
    });
}
|
||||
/**
 * Deletes one decision by id (no-op if absent).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteDecisionById(id) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const del = db.prepare("DELETE FROM decisions WHERE id = :id");
  del.run({ ":id": id });
}
|
||||
/**
 * Inserts one requirement record (plain INSERT — duplicate ids raise;
 * use upsertRequirement to replace).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertRequirement(r) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT INTO requirements (id, class, status, description, why, source, primary_owner, supporting_slices, validation, notes, full_content, superseded_by)
     VALUES (:id, :class, :status, :description, :why, :source, :primary_owner, :supporting_slices, :validation, :notes, :full_content, :superseded_by)`,
  );
  stmt.run({
    ":id": r.id,
    ":class": r.class,
    ":status": r.status,
    ":description": r.description,
    ":why": r.why,
    ":source": r.source,
    ":primary_owner": r.primary_owner,
    ":supporting_slices": r.supporting_slices,
    ":validation": r.validation,
    ":notes": r.notes,
    ":full_content": r.full_content,
    ":superseded_by": r.superseded_by,
  });
}
|
||||
/**
 * Fetches one requirement by id, mapped to a plain record
 * (superseded_by normalized to null when unset).
 * @returns the record, or null when absent or DB closed.
 */
export function getRequirementById(id) {
  const db = _getAdapter();
  if (!db) return null;
  const row = db.prepare("SELECT * FROM requirements WHERE id = ?").get(id);
  if (!row) return null;
  return {
    id: row.id,
    class: row.class,
    status: row.status,
    description: row.description,
    why: row.why,
    source: row.source,
    primary_owner: row.primary_owner,
    supporting_slices: row.supporting_slices,
    validation: row.validation,
    notes: row.notes,
    full_content: row.full_content,
    superseded_by: row.superseded_by ?? null,
  };
}
|
||||
/**
 * Lists all rows of the active_requirements view as plain records.
 * superseded_by is always null here — the view excludes superseded rows.
 * Returns [] when no database is open.
 */
export function getActiveRequirements() {
  const db = _getAdapter();
  if (!db) return [];
  const mapRow = (row) => ({
    id: row.id,
    class: row.class,
    status: row.status,
    description: row.description,
    why: row.why,
    source: row.source,
    primary_owner: row.primary_owner,
    supporting_slices: row.supporting_slices,
    validation: row.validation,
    notes: row.notes,
    full_content: row.full_content,
    superseded_by: null,
  });
  return db.prepare("SELECT * FROM active_requirements").all().map(mapRow);
}
|
||||
// Inserts or replaces one requirement record keyed by id.
// NOTE(review): this uses INSERT OR REPLACE while upsertDecision deliberately
// uses ON CONFLICT to preserve its seq column; presumably requirements has no
// replay-sensitive auto column, so delete+reinsert is safe here — confirm
// against the requirements schema in sf-db-core.js.
// Throws SFError(SF_STALE_STATE) when no database is open.
export function upsertRequirement(r) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT OR REPLACE INTO requirements (id, class, status, description, why, source, primary_owner, supporting_slices, validation, notes, full_content, superseded_by)
      VALUES (:id, :class, :status, :description, :why, :source, :primary_owner, :supporting_slices, :validation, :notes, :full_content, :superseded_by)`)
    .run({
      ":id": r.id,
      ":class": r.class,
      ":status": r.status,
      ":description": r.description,
      ":why": r.why,
      ":source": r.source,
      ":primary_owner": r.primary_owner,
      ":supporting_slices": r.supporting_slices,
      ":validation": r.validation,
      ":notes": r.notes,
      ":full_content": r.full_content,
      ":superseded_by": r.superseded_by ?? null,
    });
}
|
||||
/**
 * Deletes one requirement by id (no-op if absent).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteRequirementById(id) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const del = db.prepare("DELETE FROM requirements WHERE id = :id");
  del.run({ ":id": id });
}
|
||||
259
src/resources/extensions/sf/sf-db/sf-db-evidence.js
Normal file
259
src/resources/extensions/sf/sf-db/sf-db-evidence.js
Normal file
|
|
@ -0,0 +1,259 @@
|
|||
import { _getAdapter } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Records one verification run (command, exit code, verdict, duration)
 * for a task. INSERT OR IGNORE makes duplicate evidence idempotent.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertVerificationEvidence(e) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT OR IGNORE INTO verification_evidence (task_id, slice_id, milestone_id, command, exit_code, verdict, duration_ms, created_at)
     VALUES (:task_id, :slice_id, :milestone_id, :command, :exit_code, :verdict, :duration_ms, :created_at)`,
  );
  stmt.run({
    ":task_id": e.taskId,
    ":slice_id": e.sliceId,
    ":milestone_id": e.milestoneId,
    ":command": e.command,
    ":exit_code": e.exitCode,
    ":verdict": e.verdict,
    ":duration_ms": e.durationMs,
    ":created_at": new Date().toISOString(),
  });
}
|
||||
/**
 * Returns all verification evidence rows for one task, in insertion order.
 * Rows are returned raw (unmapped). Returns [] when no database is open.
 */
export function getVerificationEvidence(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) return [];
  return db
    .prepare(
      "SELECT * FROM verification_evidence WHERE milestone_id = :mid AND slice_id = :sid AND task_id = :tid ORDER BY id",
    )
    .all({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId });
}
|
||||
/**
 * Deletes all verification evidence for one task.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteVerificationEvidence(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const del = db.prepare(
    `DELETE FROM verification_evidence WHERE milestone_id = :mid AND slice_id = :sid AND task_id = :tid`,
  );
  del.run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId });
}
|
||||
/**
 * Appends one evidence record to a milestone's audit trail.
 * recorded_at is stamped with now; phaseName/recordedBy default to "".
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertMilestoneEvidence(
  milestoneId,
  evidenceType,
  content,
  phaseName,
  recordedBy,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT INTO milestone_evidence (milestone_id, evidence_type, content, recorded_at, phase_name, recorded_by)
     VALUES (?, ?, ?, ?, ?, ?)`,
  );
  const recordedAt = new Date().toISOString();
  stmt.run(milestoneId, evidenceType, content, recordedAt, phaseName || "", recordedBy || "");
}
|
||||
/**
 * Appends one evidence record to a slice's audit trail.
 * recorded_at is stamped with now; phaseName/recordedBy default to "".
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertSliceEvidence(
  milestoneId,
  sliceId,
  evidenceType,
  content,
  phaseName,
  recordedBy,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT INTO slice_evidence (milestone_id, slice_id, evidence_type, content, recorded_at, phase_name, recorded_by)
     VALUES (?, ?, ?, ?, ?, ?, ?)`,
  );
  const recordedAt = new Date().toISOString();
  stmt.run(
    milestoneId,
    sliceId,
    evidenceType,
    content,
    recordedAt,
    phaseName || "",
    recordedBy || "",
  );
}
|
||||
/**
 * Appends one evidence record to a task's audit trail.
 * recorded_at is stamped with now; phaseName/recordedBy default to "".
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTaskEvidence(
  milestoneId,
  sliceId,
  taskId,
  evidenceType,
  content,
  phaseName,
  recordedBy,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT INTO task_evidence (milestone_id, slice_id, task_id, evidence_type, content, recorded_at, phase_name, recorded_by)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
  );
  const recordedAt = new Date().toISOString();
  stmt.run(
    milestoneId,
    sliceId,
    taskId,
    evidenceType,
    content,
    recordedAt,
    phaseName || "",
    recordedBy || "",
  );
}
|
||||
// Returns the audit trail for a milestone: the milestone row joined with its
// spec (vision, spec_version) and every evidence record, oldest evidence
// first. LEFT JOINs mean a milestone with no spec/evidence still yields one
// row with null joined columns. Rows are raw (unmapped) join results.
// Returns [] when no database is open.
export function getMilestoneAuditTrail(milestoneId) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  return currentDb
    .prepare(`
    SELECT
      r.id, r.title, r.status,
      s.vision, s.spec_version,
      e.evidence_type, e.content, e.recorded_at, e.phase_name, e.recorded_by
    FROM milestones r
    LEFT JOIN milestone_specs s ON r.id = s.id
    LEFT JOIN milestone_evidence e ON r.id = e.milestone_id
    WHERE r.id = ?
    ORDER BY e.recorded_at ASC
  `)
    .all(milestoneId);
}
|
||||
// Returns the audit trail for a slice: the slice row joined with its spec
// (goal, spec_version) and every evidence record, oldest evidence first.
// LEFT JOINs keep the slice row even without spec/evidence. Rows are raw
// (unmapped) join results. Returns [] when no database is open.
export function getSliceAuditTrail(milestoneId, sliceId) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  return currentDb
    .prepare(`
    SELECT
      r.id, r.title, r.status,
      s.goal, s.spec_version,
      e.evidence_type, e.content, e.recorded_at, e.phase_name, e.recorded_by
    FROM slices r
    LEFT JOIN slice_specs s ON r.milestone_id = s.milestone_id AND r.id = s.slice_id
    LEFT JOIN slice_evidence e ON r.milestone_id = e.milestone_id AND r.id = e.slice_id
    WHERE r.milestone_id = ? AND r.id = ?
    ORDER BY e.recorded_at ASC
  `)
    .all(milestoneId, sliceId);
}
|
||||
// Returns the audit trail for a task: the task row joined with its spec
// (verify, spec_version) and every evidence record, oldest evidence first.
// LEFT JOINs keep the task row even without spec/evidence. Rows are raw
// (unmapped) join results. Returns [] when no database is open.
export function getTaskAuditTrail(milestoneId, sliceId, taskId) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  return currentDb
    .prepare(`
    SELECT
      r.id, r.title, r.status,
      s.verify, s.spec_version,
      e.evidence_type, e.content, e.recorded_at, e.phase_name, e.recorded_by
    FROM tasks r
    LEFT JOIN task_specs s ON r.milestone_id = s.milestone_id AND r.slice_id = s.slice_id AND r.id = s.task_id
    LEFT JOIN task_evidence e ON r.milestone_id = e.milestone_id AND r.slice_id = e.slice_id AND r.id = e.task_id
    WHERE r.milestone_id = ? AND r.slice_id = ? AND r.id = ?
    ORDER BY e.recorded_at ASC
  `)
    .all(milestoneId, sliceId, taskId);
}
|
||||
/**
 * Upserts one assessment keyed by path; created_at is stamped with now.
 * sliceId/taskId are optional and normalized to null.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertAssessment(entry) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT OR REPLACE INTO assessments (path, milestone_id, slice_id, task_id, status, scope, full_content, created_at)
     VALUES (:path, :milestone_id, :slice_id, :task_id, :status, :scope, :full_content, :created_at)`,
  );
  stmt.run({
    ":path": entry.path,
    ":milestone_id": entry.milestoneId,
    ":slice_id": entry.sliceId ?? null,
    ":task_id": entry.taskId ?? null,
    ":status": entry.status,
    ":scope": entry.scope,
    ":full_content": entry.fullContent,
    ":created_at": new Date().toISOString(),
  });
}
|
||||
/**
 * Deletes every assessment matching a milestone + scope pair.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteAssessmentByScope(milestoneId, scope) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const del = db.prepare(
    `DELETE FROM assessments WHERE milestone_id = :mid AND scope = :scope`,
  );
  del.run({ ":mid": milestoneId, ":scope": scope });
}
|
||||
/**
 * Fetches one assessment row by path (raw row, unmapped).
 * @returns the row, or null when absent or DB closed.
 */
export function getAssessment(path) {
  const db = _getAdapter();
  if (!db) return null;
  const row = db
    .prepare(`SELECT * FROM assessments WHERE path = :path`)
    .get({ ":path": path });
  return row ?? null;
}
|
||||
/**
 * Fetches the most recent assessment (by created_at) for a milestone + scope.
 * @returns the raw row, or null when absent or DB closed.
 */
export function getAssessmentByScope(milestoneId, scope) {
  const db = _getAdapter();
  if (!db) return null;
  const sql = `SELECT * FROM assessments
   WHERE milestone_id = :mid AND scope = :scope
   ORDER BY created_at DESC
   LIMIT 1`;
  const row = db.prepare(sql).get({ ":mid": milestoneId, ":scope": scope });
  return row ?? null;
}
|
||||
// Convenience wrapper: latest assessment with scope "milestone-validation"
// for the given milestone, or null when absent / DB closed.
export function getMilestoneValidationAssessment(milestoneId) {
  return getAssessmentByScope(milestoneId, "milestone-validation");
}
|
||||
/**
 * Records one replan event; created_at is stamped with now.
 * INSERT OR REPLACE: idempotent on (milestone_id, slice_id, task_id) via the
 * schema v11 unique index — retrying the same replan updates the summary
 * rather than accumulating duplicate rows.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertReplanHistory(entry) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT OR REPLACE INTO replan_history (milestone_id, slice_id, task_id, summary, previous_artifact_path, replacement_artifact_path, created_at)
     VALUES (:milestone_id, :slice_id, :task_id, :summary, :previous_artifact_path, :replacement_artifact_path, :created_at)`,
  );
  stmt.run({
    ":milestone_id": entry.milestoneId,
    ":slice_id": entry.sliceId ?? null,
    ":task_id": entry.taskId ?? null,
    ":summary": entry.summary,
    ":previous_artifact_path": entry.previousArtifactPath ?? null,
    ":replacement_artifact_path": entry.replacementArtifactPath ?? null,
    ":created_at": new Date().toISOString(),
  });
}
|
||||
/**
 * Lists replan history for a milestone, newest first; when a (truthy)
 * sliceId is given the result is narrowed to that slice.
 * Returns [] when no database is open.
 */
export function getReplanHistory(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return [];
  if (!sliceId) {
    return db
      .prepare(
        `SELECT * FROM replan_history WHERE milestone_id = :mid ORDER BY created_at DESC`,
      )
      .all({ ":mid": milestoneId });
  }
  return db
    .prepare(
      `SELECT * FROM replan_history WHERE milestone_id = :mid AND slice_id = :sid ORDER BY created_at DESC`,
    )
    .all({ ":mid": milestoneId, ":sid": sliceId });
}
||||
372
src/resources/extensions/sf/sf-db/sf-db-gates.js
Normal file
372
src/resources/extensions/sf/sf-db/sf-db-gates.js
Normal file
|
|
@ -0,0 +1,372 @@
|
|||
import { dirname } from 'node:path';
|
||||
import { _getAdapter, getDbPath, rowToGate, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { getGateIdsForTurn } from '../gate-registry.js';
|
||||
import { readTraceEvents } from '../uok/trace-writer.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Registers one quality-gate row; INSERT OR IGNORE keeps re-registration
 * idempotent. taskId defaults to "" and status to "pending".
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertGateRow(g) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `INSERT OR IGNORE INTO quality_gates (milestone_id, slice_id, gate_id, scope, task_id, status)
     VALUES (:mid, :sid, :gid, :scope, :tid, :status)`,
  );
  stmt.run({
    ":mid": g.milestoneId,
    ":sid": g.sliceId,
    ":gid": g.gateId,
    ":scope": g.scope,
    ":tid": g.taskId ?? "",
    ":status": g.status ?? "pending",
  });
}
|
||||
/**
 * Marks one quality gate complete with its verdict/rationale/findings, then
 * mirrors the result into the gate-run trace via insertGateRun.
 * Verdict→outcome mapping: "pass"→pass, "omitted"→manual-attention,
 * anything else→fail; failureClass follows the outcome.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function saveGateResult(g) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const evaluatedAt = new Date().toISOString();
  db
    .prepare(`UPDATE quality_gates
     SET status = 'complete', verdict = :verdict, rationale = :rationale,
         findings = :findings, evaluated_at = :evaluated_at
     WHERE milestone_id = :mid AND slice_id = :sid AND gate_id = :gid
       AND task_id = :tid`)
    .run({
      ":mid": g.milestoneId,
      ":sid": g.sliceId,
      ":gid": g.gateId,
      ":tid": g.taskId ?? "",
      ":verdict": g.verdict,
      ":rationale": g.rationale,
      ":findings": g.findings,
      ":evaluated_at": evaluatedAt,
    });
  let outcome = "fail";
  if (g.verdict === "pass") outcome = "pass";
  else if (g.verdict === "omitted") outcome = "manual-attention";
  let failureClass = "none";
  if (outcome === "fail") failureClass = "verification";
  else if (outcome === "manual-attention") failureClass = "manual-attention";
  insertGateRun({
    traceId: `quality-gate:${g.milestoneId}:${g.sliceId}`,
    turnId: `gate:${g.gateId}:${g.taskId ?? "slice"}`,
    gateId: g.gateId,
    gateType: "quality-gate",
    milestoneId: g.milestoneId,
    sliceId: g.sliceId,
    taskId: g.taskId ?? undefined,
    outcome,
    failureClass,
    rationale: g.rationale,
    findings: g.findings,
    attempt: 1,
    maxAttempts: 1,
    retryable: false,
    evaluatedAt: new Date().toISOString(),
  });
}
|
||||
/**
 * Lists pending quality gates for a milestone/slice, optionally narrowed
 * by scope. Returns mapped gate objects, or [] when no database is open.
 */
export function getPendingGates(milestoneId, sliceId, scope) {
  const db = _getAdapter();
  if (!db) return [];
  const params = { ":mid": milestoneId, ":sid": sliceId };
  let sql = `SELECT * FROM quality_gates WHERE milestone_id = :mid AND slice_id = :sid AND status = 'pending'`;
  if (scope) {
    sql = `SELECT * FROM quality_gates WHERE milestone_id = :mid AND slice_id = :sid AND scope = :scope AND status = 'pending'`;
    params[":scope"] = scope;
  }
  return db.prepare(sql).all(params).map(rowToGate);
}
|
||||
/**
 * Lists all quality-gate rows (any status) for a milestone/slice, optionally
 * narrowed by scope. Returns mapped gate objects, or [] when DB closed.
 */
export function getGateResults(milestoneId, sliceId, scope) {
  const db = _getAdapter();
  if (!db) return [];
  const params = { ":mid": milestoneId, ":sid": sliceId };
  let sql = `SELECT * FROM quality_gates WHERE milestone_id = :mid AND slice_id = :sid`;
  if (scope) {
    sql = `SELECT * FROM quality_gates WHERE milestone_id = :mid AND slice_id = :sid AND scope = :scope`;
    params[":scope"] = scope;
  }
  return db.prepare(sql).all(params).map(rowToGate);
}
|
||||
/**
 * Marks every still-pending gate for a milestone/slice as omitted,
 * stamping evaluated_at with now. No-op when no database is open.
 */
export function markAllGatesOmitted(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return;
  const stmt = db.prepare(`UPDATE quality_gates SET status = 'omitted', verdict = 'omitted', evaluated_at = :now
   WHERE milestone_id = :mid AND slice_id = :sid AND status = 'pending'`);
  stmt.run({
    ":mid": milestoneId,
    ":sid": sliceId,
    ":now": new Date().toISOString(),
  });
}
|
||||
/**
 * Counts pending slice-scoped gates for a milestone/slice.
 * Returns 0 when no database is open.
 */
export function getPendingSliceGateCount(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return 0;
  const counted = db
    .prepare(`SELECT COUNT(*) as cnt FROM quality_gates
   WHERE milestone_id = :mid AND slice_id = :sid AND scope = 'slice' AND status = 'pending'`)
    .get({ ":mid": milestoneId, ":sid": sliceId });
  return counted ? counted.cnt : 0;
}
|
||||
/**
 * Lists pending gates for a milestone/slice restricted to the gate ids
 * registered for the given turn; optionally narrowed to one taskId
 * (note: taskId is matched only when !== undefined, so "" is a valid filter).
 * Returns mapped gate objects, or [] when DB closed or no ids for the turn.
 */
export function getPendingGatesForTurn(milestoneId, sliceId, turn, taskId) {
  const db = _getAdapter();
  if (!db) return [];
  const ids = getGateIdsForTurn(turn);
  if (ids.size === 0) return [];
  const params = { ":mid": milestoneId, ":sid": sliceId };
  const names = [];
  let i = 0;
  for (const gateId of ids) {
    const name = `:gid${i}`;
    names.push(name);
    params[name] = gateId;
    i += 1;
  }
  let sql = `SELECT * FROM quality_gates
   WHERE milestone_id = :mid AND slice_id = :sid
     AND status = 'pending'
     AND gate_id IN (${names.join(",")})`;
  if (taskId !== undefined) {
    sql += ` AND task_id = :tid`;
    params[":tid"] = taskId;
  }
  return db.prepare(sql).all(params).map(rowToGate);
}
|
||||
// Convenience wrapper: number of pending gates registered for this turn
// (no taskId filter). 0 when no database is open or the turn has no gates.
export function getPendingGateCountForTurn(milestoneId, sliceId, turn) {
  return getPendingGatesForTurn(milestoneId, sliceId, turn).length;
}
|
||||
// Deliberate no-op kept for API compatibility: gate runs are now written to
// JSONL trace files (read back via getGateRunStats), not the database.
export function insertGateRun(_entry) {
  // no-op: gate runs now written to JSONL trace files
}
|
||||
// Deliberate no-op kept for API compatibility: turn git transactions are now
// written to JSONL audit events instead of the database.
export function upsertTurnGitTransaction(_entry) {
  // no-op: turn git transactions now written to JSONL audit events
}
|
||||
/**
 * Aggregates gate_run JSONL trace events for one gate over a trailing window.
 * The trace base path is derived from the DB path (two levels up), falling
 * back to process.cwd() for in-memory/unset databases.
 *
 * @param {string} gateId - gate to aggregate events for.
 * @param {number} [windowHours=24] - trailing window passed to readTraceEvents.
 * @returns {{total:number,pass:number,fail:number,retry:number,manualAttention:number,lastEvaluatedAt:?string}}
 *   Zeroed stats on any read failure (best-effort; never throws).
 */
export function getGateRunStats(gateId, windowHours = 24) {
  // Single source of truth for the zeroed shape, used both as the starting
  // accumulator and as the error fallback so the two can never drift apart.
  const emptyStats = () => ({
    total: 0,
    pass: 0,
    fail: 0,
    retry: 0,
    manualAttention: 0,
    lastEvaluatedAt: null,
  });
  try {
    const currentPath = getDbPath();
    const basePath =
      currentPath && currentPath !== ":memory:"
        ? dirname(dirname(currentPath))
        : process.cwd();
    const events = readTraceEvents(basePath, "gate_run", windowHours).filter(
      (e) => e.gateId === gateId,
    );
    const stats = emptyStats();
    stats.total = events.length;
    for (const e of events) {
      if (e.outcome === "pass") stats.pass++;
      else if (e.outcome === "fail") stats.fail++;
      else if (e.outcome === "retry") stats.retry++;
      else if (e.outcome === "manual-attention") stats.manualAttention++;
      // Prefer the event's own evaluatedAt; fall back to its write timestamp.
      const at = e.evaluatedAt ?? e.ts;
      if (!stats.lastEvaluatedAt || at > stats.lastEvaluatedAt) {
        stats.lastEvaluatedAt = at;
      }
    }
    return stats;
  } catch {
    // Best-effort: trace files may be missing or unreadable.
    return emptyStats();
  }
}
|
||||
/**
 * Reads the circuit-breaker state for one gate.
 * Returns a default "closed" breaker when no database is open, the row is
 * absent, or the read fails (e.g. table missing) — never throws.
 *
 * @param {string} gateId
 * @returns {{gateId:string,state:string,failureStreak:number,lastFailureAt:?string,openedAt:?string,halfOpenAttempts:number,updatedAt:?string}}
 */
export function getGateCircuitBreaker(gateId) {
  // Single source of truth for the default shape; previously this object was
  // written out three times, inviting drift between the fallback paths.
  const closedBreaker = () => ({
    gateId,
    state: "closed",
    failureStreak: 0,
    lastFailureAt: null,
    openedAt: null,
    halfOpenAttempts: 0,
    updatedAt: null,
  });
  const db = _getAdapter();
  if (!db) return closedBreaker();
  try {
    const row = db
      .prepare(
        `SELECT gate_id, state, failure_streak, last_failure_at, opened_at, half_open_attempts, updated_at
       FROM gate_circuit_breakers
       WHERE gate_id = :gate_id`,
      )
      .get({ ":gate_id": gateId });
    if (!row) return closedBreaker();
    return {
      gateId: row.gate_id,
      state: row.state,
      failureStreak: row.failure_streak ?? 0,
      lastFailureAt: row.last_failure_at ?? null,
      openedAt: row.opened_at ?? null,
      halfOpenAttempts: row.half_open_attempts ?? 0,
      updatedAt: row.updated_at ?? null,
    };
  } catch {
    // Best-effort read: treat any storage error as a closed breaker.
    return closedBreaker();
  }
}
|
||||
/**
 * Persist circuit-breaker state for a gate (insert-or-update on gate_id).
 *
 * last_failure_at and opened_at are only overwritten when a non-null value
 * is supplied — the SQL COALESCE keeps the stored value otherwise.
 * Silently no-ops when no database is open.
 *
 * Fix: removed a stray `return { total: 0, avgMs: 0, p50Ms: 0, p95Ms: 0,
 * maxMs: 0 }` after the .run() call — a copy/paste leftover from the
 * latency-stats helper; an upsert has no latency stats to report.
 *
 * @param {string} gateId
 * @param {{state?: string, failureStreak?: number, lastFailureAt?: ?string,
 *   openedAt?: ?string, halfOpenAttempts?: number}} updates
 * @returns {void}
 */
export function updateGateCircuitBreaker(gateId, updates) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  currentDb
    .prepare(
      `INSERT INTO gate_circuit_breakers (
        gate_id, state, failure_streak, last_failure_at, opened_at, half_open_attempts, updated_at
      ) VALUES (
        :gate_id, :state, :failure_streak, :last_failure_at, :opened_at, :half_open_attempts, :updated_at
      )
      ON CONFLICT(gate_id) DO UPDATE SET
        state = excluded.state,
        failure_streak = excluded.failure_streak,
        last_failure_at = COALESCE(excluded.last_failure_at, gate_circuit_breakers.last_failure_at),
        opened_at = COALESCE(excluded.opened_at, gate_circuit_breakers.opened_at),
        half_open_attempts = excluded.half_open_attempts,
        updated_at = excluded.updated_at`,
    )
    .run({
      ":gate_id": gateId,
      ":state": updates.state ?? "closed",
      ":failure_streak": updates.failureStreak ?? 0,
      ":last_failure_at": updates.lastFailureAt ?? null,
      ":opened_at": updates.openedAt ?? null,
      ":half_open_attempts": updates.halfOpenAttempts ?? 0,
      ":updated_at": new Date().toISOString(),
    });
}
|
||||
|
||||
/**
 * Compute latency percentiles for one gate from gate_run trace events.
 *
 * Returns both the legacy field names (p50/p95/count) and the newer ones
 * (p50Ms/p95Ms/avgMs/maxMs/total) so older and newer callers both work.
 * Never throws — any read failure yields the zeroed shape.
 *
 * @param {string} gateId
 * @param {number} [windowHours=24]
 * @returns {{p50: ?number, p95: ?number, count: number, total: number,
 *   avgMs: number, p50Ms: number, p95Ms: number, maxMs: number}}
 */
export function getGateLatencyStats(gateId, windowHours = 24) {
  const noData = {
    p50: null,
    p95: null,
    count: 0,
    total: 0,
    avgMs: 0,
    p50Ms: 0,
    p95Ms: 0,
    maxMs: 0,
  };
  try {
    const dbPath = getDbPath();
    const root =
      dbPath && dbPath !== ":memory:"
        ? dirname(dirname(dbPath))
        : process.cwd();
    const samples = readTraceEvents(root, "gate_run", windowHours)
      .filter((ev) => ev.gateId === gateId && typeof ev.durationMs === "number")
      .map((ev) => ev.durationMs)
      .sort((a, b) => a - b);
    if (samples.length === 0) return { ...noData };
    // Nearest-rank percentile over the ascending sample list.
    const pick = (q) => samples[Math.floor(samples.length * q)] ?? 0;
    const p50Ms = pick(0.5);
    const p95Ms = pick(0.95);
    const maxMs = samples.at(-1) ?? 0;
    const sum = samples.reduce((acc, v) => acc + v, 0);
    const avgMs = Math.round(sum / samples.length);
    return {
      p50: p50Ms,
      p95: p95Ms,
      count: samples.length,
      total: samples.length,
      avgMs,
      p50Ms,
      p95Ms,
      maxMs,
    };
  } catch {
    return { ...noData };
  }
}
|
||||
|
||||
/**
 * List the distinct gate ids seen in gate_run trace events over the last
 * 30 days. Falsy/missing gateId values are skipped; first-seen order is
 * preserved. Returns [] on any read failure.
 *
 * @returns {string[]} Unique gate ids.
 */
export function getDistinctGateIds() {
  try {
    const dbPath = getDbPath();
    const root =
      dbPath && dbPath !== ":memory:"
        ? dirname(dirname(dbPath))
        : process.cwd();
    const seen = new Set();
    for (const ev of readTraceEvents(root, "gate_run", 24 * 30)) {
      if (ev.gateId) seen.add(ev.gateId);
    }
    return [...seen];
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Insert or replace a quality-gate record (INSERT OR REPLACE, so an existing
 * row with the same primary key is fully overwritten). All fields are taken
 * verbatim from `g`; no defaults are applied here.
 *
 * @param {object} g - Gate record: milestoneId, sliceId, gateId, scope,
 *   taskId, status, verdict, rationale, findings, evaluatedAt.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertQualityGate(g) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT OR REPLACE INTO quality_gates
      (milestone_id, slice_id, gate_id, scope, task_id, status, verdict, rationale, findings, evaluated_at)
      VALUES (:mid, :sid, :gid, :scope, :tid, :status, :verdict, :rationale, :findings, :evaluated_at)`)
    .run({
      ":mid": g.milestoneId,
      ":sid": g.sliceId,
      ":gid": g.gateId,
      ":scope": g.scope,
      ":tid": g.taskId,
      ":status": g.status,
      ":verdict": g.verdict,
      ":rationale": g.rationale,
      ":findings": g.findings,
      ":evaluated_at": g.evaluatedAt,
    });
}
|
||||
543
src/resources/extensions/sf/sf-db/sf-db-learning.js
Normal file
543
src/resources/extensions/sf/sf-db/sf-db-learning.js
Normal file
|
|
@ -0,0 +1,543 @@
|
|||
import { _getAdapter, boolToInt, intBool, parseJsonObject, solverEvalRunFromRow, solverEvalCaseFromRow, headlessRunFromRow, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logError, logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Record one LLM task outcome (success, retries, cost, timing) for model
 * performance tracking. Upserts on (unit_type, unit_id, recorded_at).
 *
 * NOTE(review): the input object mixes camelCase (modelId, unitType, unitId)
 * and snake_case (verification_passed, duration_ms, cost_usd, recorded_at)
 * field names — confirm against callers that this is intentional.
 *
 * @param {object} input - Outcome fields; see the parameter mapping below.
 * @returns {boolean} true when the row was written, false on any DB error
 *   (best-effort telemetry: failures are swallowed, not rethrown).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertLlmTaskOutcome(input) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  try {
    currentDb
      .prepare(`INSERT INTO llm_task_outcomes (
        model_id,
        provider,
        unit_type,
        unit_id,
        succeeded,
        retries,
        escalated,
        verification_passed,
        blocker_discovered,
        duration_ms,
        tokens_total,
        cost_usd,
        failure_mode,
        recorded_at
      ) VALUES (
        :model_id,
        :provider,
        :unit_type,
        :unit_id,
        :succeeded,
        :retries,
        :escalated,
        :verification_passed,
        :blocker_discovered,
        :duration_ms,
        :tokens_total,
        :cost_usd,
        :failure_mode,
        :recorded_at
      )
      ON CONFLICT(unit_type, unit_id, recorded_at) DO UPDATE SET
        model_id = excluded.model_id,
        provider = excluded.provider,
        succeeded = excluded.succeeded,
        retries = excluded.retries,
        escalated = excluded.escalated,
        verification_passed = excluded.verification_passed,
        blocker_discovered = excluded.blocker_discovered,
        duration_ms = excluded.duration_ms,
        tokens_total = excluded.tokens_total,
        cost_usd = excluded.cost_usd,
        failure_mode = excluded.failure_mode`)
      .run({
        ":model_id": input.modelId,
        ":provider": input.provider,
        ":unit_type": input.unitType,
        ":unit_id": input.unitId,
        ":succeeded": boolToInt(input.succeeded),
        ":retries": input.retries ?? 0,
        ":escalated": boolToInt(input.escalated ?? false),
        // null means "verification not applicable"; boolToInt must cope.
        ":verification_passed": boolToInt(input.verification_passed ?? null),
        ":blocker_discovered": boolToInt(input.blocker_discovered ?? false),
        ":duration_ms": input.duration_ms ?? null,
        ":tokens_total": input.tokens_total ?? null,
        ":cost_usd": input.cost_usd ?? null,
        ":failure_mode": input.failure_mode ?? null,
        // Epoch milliseconds, unlike the ISO strings used elsewhere here.
        ":recorded_at": input.recorded_at ?? Date.now(),
      });
    return true;
  } catch {
    // Best-effort telemetry: never let outcome logging break the caller.
    return false;
  }
}
|
||||
|
||||
/**
 * Fetch recent LLM task outcomes for one work unit, newest first.
 *
 * NOTE(review): failure_mode is written by insertLlmTaskOutcome but not
 * selected here — confirm the omission is intentional.
 *
 * @param {string} unitType
 * @param {string} unitId
 * @param {number} [limit=20] - Maximum rows returned.
 * @returns {Array<object>} Raw snake_case rows; [] when no DB is open or on
 *   any query error (best-effort read).
 */
export function getLlmTaskOutcomesByUnit(unitType, unitId, limit = 20) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  try {
    return currentDb
      .prepare(
        `SELECT
          model_id,
          provider,
          unit_type,
          unit_id,
          succeeded,
          retries,
          escalated,
          verification_passed,
          blocker_discovered,
          duration_ms,
          tokens_total,
          cost_usd,
          recorded_at
        FROM llm_task_outcomes
        WHERE unit_type = :unit_type
        AND unit_id = :unit_id
        ORDER BY recorded_at DESC
        LIMIT :limit`,
      )
      .all({
        ":unit_type": unitType,
        ":unit_id": unitId,
        ":limit": limit,
      });
  } catch {
    // Table may not exist on older schemas; treat as "no data".
    return [];
  }
}
|
||||
|
||||
/**
 * Fetch recent LLM task outcomes for one model, newest first.
 *
 * @param {string} modelId
 * @param {number} [limit=50] - Maximum rows returned.
 * @returns {Array<object>} Raw snake_case rows; [] when no DB is open or on
 *   any query error (best-effort read).
 */
export function getLlmTaskOutcomesByModel(modelId, limit = 50) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  try {
    return currentDb
      .prepare(
        `SELECT
          model_id,
          provider,
          unit_type,
          unit_id,
          succeeded,
          retries,
          escalated,
          verification_passed,
          blocker_discovered,
          duration_ms,
          tokens_total,
          cost_usd,
          recorded_at
        FROM llm_task_outcomes
        WHERE model_id = :model_id
        ORDER BY recorded_at DESC
        LIMIT :limit`,
      )
      .all({
        ":model_id": modelId,
        ":limit": limit,
      });
  } catch {
    // Table may not exist on older schemas; treat as "no data".
    return [];
  }
}
|
||||
|
||||
/**
 * Fetch LLM task outcomes recorded in the last `hours` hours, newest first.
 *
 * @param {number} [hours=24] - Look-back window; compared against the
 *   epoch-millisecond recorded_at column.
 * @param {number} [limit=100] - Maximum rows returned.
 * @returns {Array<object>} Raw snake_case rows; [] when no DB is open or on
 *   any query error (best-effort read).
 */
export function getRecentLlmTaskOutcomes(hours = 24, limit = 100) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  const cutoff = Date.now() - hours * 60 * 60 * 1000;
  try {
    return currentDb
      .prepare(
        `SELECT
          model_id,
          provider,
          unit_type,
          unit_id,
          succeeded,
          retries,
          escalated,
          verification_passed,
          blocker_discovered,
          duration_ms,
          tokens_total,
          cost_usd,
          recorded_at
        FROM llm_task_outcomes
        WHERE recorded_at >= :cutoff
        ORDER BY recorded_at DESC
        LIMIT :limit`,
      )
      .all({
        ":cutoff": cutoff,
        ":limit": limit,
      });
  } catch {
    // Table may not exist on older schemas; treat as "no data".
    return [];
  }
}
|
||||
|
||||
/**
 * Aggregate outcome statistics for one model over a recent time window.
 *
 * @param {string} modelId
 * @param {number} [windowHours=24] - Look-back window; compared against the
 *   epoch-millisecond recorded_at column.
 * @returns {{total: number, succeeded: number, failed: number,
 *   totalCostUsd: number, totalTokens: number, avgDurationMs: number}}
 *   Zeroed stats when no DB is open, no row comes back, or the query fails.
 */
export function getLlmTaskOutcomeStats(modelId, windowHours = 24) {
  // Single factory for the zeroed result — this literal was previously
  // duplicated three times inside this function.
  const zeroStats = () => ({
    total: 0,
    succeeded: 0,
    failed: 0,
    totalCostUsd: 0,
    totalTokens: 0,
    avgDurationMs: 0,
  });
  const currentDb = _getAdapter();
  if (!currentDb) return zeroStats();
  const cutoff = Date.now() - windowHours * 60 * 60 * 1000;
  try {
    const row = currentDb
      .prepare(
        `SELECT
          COUNT(*) AS total,
          COALESCE(SUM(CASE WHEN succeeded = 1 THEN 1 ELSE 0 END), 0) AS succeeded,
          COALESCE(SUM(CASE WHEN succeeded = 0 THEN 1 ELSE 0 END), 0) AS failed,
          COALESCE(SUM(cost_usd), 0) AS totalCostUsd,
          COALESCE(SUM(tokens_total), 0) AS totalTokens,
          COALESCE(AVG(duration_ms), 0) AS avgDurationMs
        FROM llm_task_outcomes
        WHERE model_id = :model_id
        AND recorded_at >= :cutoff`,
      )
      .get({ ":model_id": modelId, ":cutoff": cutoff });
    if (!row) return zeroStats();
    return {
      total: row.total ?? 0,
      succeeded: row.succeeded ?? 0,
      failed: row.failed ?? 0,
      totalCostUsd: row.totalCostUsd ?? 0,
      totalTokens: row.totalTokens ?? 0,
      avgDurationMs: row.avgDurationMs ?? 0,
    };
  } catch {
    // Best-effort read: table may not exist on older schemas.
    return zeroStats();
  }
}
|
||||
|
||||
/**
 * Persist a solver evaluation report: one summary row in solver_eval_runs
 * plus one row per case in solver_eval_case_results, all inside a single
 * transaction so a partial report is never visible.
 *
 * Both writes are upserts: the run is keyed on run_id, case results on
 * (run_id, case_id, mode). created_at is preserved on update for the run
 * row; only updated_at moves forward.
 *
 * @param {object} report - Eval report: runId, suiteSource, summary,
 *   reportPath, resultsPath, createdAt, results[].
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function recordSolverEvalRun(report) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  transaction(() => {
    currentDb
      .prepare(`INSERT INTO solver_eval_runs (
        run_id, suite_source, cases_count, summary_json, report_path,
        results_path, db_recorded, created_at, updated_at
      ) VALUES (
        :run_id, :suite_source, :cases_count, :summary_json, :report_path,
        :results_path, 1, :created_at, :updated_at
      )
      ON CONFLICT(run_id) DO UPDATE SET
        suite_source = excluded.suite_source,
        cases_count = excluded.cases_count,
        summary_json = excluded.summary_json,
        report_path = excluded.report_path,
        results_path = excluded.results_path,
        db_recorded = 1,
        updated_at = excluded.updated_at`)
      .run({
        ":run_id": report.runId,
        ":suite_source": report.suiteSource ?? "",
        // Prefer the summary's declared count; fall back to counting results.
        ":cases_count": report.summary?.cases ?? report.results?.length ?? 0,
        ":summary_json": JSON.stringify(report.summary ?? {}),
        ":report_path": report.reportPath ?? "",
        ":results_path": report.resultsPath ?? "",
        ":created_at": report.createdAt ?? now,
        ":updated_at": now,
      });
    // One prepared statement reused for every case row.
    const stmt = currentDb.prepare(`INSERT INTO solver_eval_case_results (
      run_id, case_id, title, mode, passed, false_complete, duration_ms,
      command_status, solver_outcome, pdd_complete, result_json, created_at
    ) VALUES (
      :run_id, :case_id, :title, :mode, :passed, :false_complete, :duration_ms,
      :command_status, :solver_outcome, :pdd_complete, :result_json, :created_at
    )
    ON CONFLICT(run_id, case_id, mode) DO UPDATE SET
      title = excluded.title,
      passed = excluded.passed,
      false_complete = excluded.false_complete,
      duration_ms = excluded.duration_ms,
      command_status = excluded.command_status,
      solver_outcome = excluded.solver_outcome,
      pdd_complete = excluded.pdd_complete,
      result_json = excluded.result_json,
      created_at = excluded.created_at`);
    for (const result of report.results ?? []) {
      stmt.run({
        ":run_id": report.runId,
        ":case_id": result.caseId,
        ":title": result.title ?? "",
        ":mode": result.mode,
        ":passed": intBool(result.passed),
        ":false_complete": intBool(result.falseComplete),
        ":duration_ms": result.command?.durationMs ?? null,
        ":command_status": result.command?.status ?? null,
        ":solver_outcome": result.solverSignals?.outcome ?? null,
        // Tri-state: undefined -> NULL (unknown), otherwise 0/1.
        ":pdd_complete":
          result.solverSignals?.pddComplete === undefined
            ? null
            : intBool(result.solverSignals.pddComplete),
        ":result_json": JSON.stringify(result),
        ":created_at": report.createdAt ?? now,
      });
    }
  });
}
|
||||
|
||||
/**
 * List solver evaluation runs, newest first.
 *
 * @param {number} [limit=10] - Clamped to [1, 100]; non-numeric input falls
 *   back to 10.
 * @returns {Array<object>} Mapped run records.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function listSolverEvalRuns(limit = 10) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const rowCap = Math.max(1, Math.min(100, Number(limit) || 10));
  const rows = currentDb
    .prepare(`SELECT run_id, suite_source, cases_count, summary_json,
      report_path, results_path, db_recorded, created_at, updated_at
      FROM solver_eval_runs
      ORDER BY created_at DESC, run_id DESC
      LIMIT :limit`)
    .all({ ":limit": rowCap });
  return rows.map(solverEvalRunFromRow);
}
|
||||
|
||||
/**
 * Fetch a single solver evaluation run by id.
 *
 * @param {string} runId
 * @returns {?object} Mapped run record, or null when no row matches.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function getSolverEvalRun(runId) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const row = currentDb
    .prepare(`SELECT run_id, suite_source, cases_count, summary_json,
      report_path, results_path, db_recorded, created_at, updated_at
      FROM solver_eval_runs
      WHERE run_id = :run_id`)
    .get({ ":run_id": runId });
  if (!row) return null;
  return solverEvalRunFromRow(row);
}
|
||||
|
||||
/**
 * Fetch all per-case results for one solver evaluation run, ordered by
 * case id then mode.
 *
 * @param {string} runId
 * @returns {Array<object>} Mapped case-result records.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function getSolverEvalCaseResults(runId) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const rows = currentDb
    .prepare(`SELECT run_id, case_id, title, mode, passed, false_complete,
      duration_ms, command_status, solver_outcome, pdd_complete,
      result_json, created_at
      FROM solver_eval_case_results
      WHERE run_id = :run_id
      ORDER BY case_id ASC, mode ASC`)
    .all({ ":run_id": runId });
  return rows.map(solverEvalCaseFromRow);
}
|
||||
|
||||
/**
 * Insert or update the summary row for a headless run, keyed on run_id.
 *
 * Numeric fields are coerced with Number(...) and default to 0; booleans go
 * through intBool; details are serialised to JSON. created_at is preserved
 * on update — only updated_at moves forward.
 *
 * @param {object} entry - Run summary: runId, command, status, exitCode,
 *   timedOut, interrupted, restartCount, maxRestarts, durationMs,
 *   totalEvents, toolCalls, solverEvalRunId, solverEvalReportPath, details,
 *   createdAt.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function recordHeadlessRun(entry) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  currentDb
    .prepare(`INSERT INTO headless_runs (
      run_id, command, status, exit_code, timed_out, interrupted,
      restart_count, max_restarts, duration_ms, total_events, tool_calls,
      solver_eval_run_id, solver_eval_report_path, details_json,
      created_at, updated_at
    ) VALUES (
      :run_id, :command, :status, :exit_code, :timed_out, :interrupted,
      :restart_count, :max_restarts, :duration_ms, :total_events, :tool_calls,
      :solver_eval_run_id, :solver_eval_report_path, :details_json,
      :created_at, :updated_at
    )
    ON CONFLICT(run_id) DO UPDATE SET
      command = excluded.command,
      status = excluded.status,
      exit_code = excluded.exit_code,
      timed_out = excluded.timed_out,
      interrupted = excluded.interrupted,
      restart_count = excluded.restart_count,
      max_restarts = excluded.max_restarts,
      duration_ms = excluded.duration_ms,
      total_events = excluded.total_events,
      tool_calls = excluded.tool_calls,
      solver_eval_run_id = excluded.solver_eval_run_id,
      solver_eval_report_path = excluded.solver_eval_report_path,
      details_json = excluded.details_json,
      updated_at = excluded.updated_at`)
    .run({
      ":run_id": entry.runId,
      ":command": entry.command ?? "",
      ":status": entry.status ?? "",
      ":exit_code": Number(entry.exitCode ?? 0),
      ":timed_out": intBool(entry.timedOut),
      ":interrupted": intBool(entry.interrupted),
      ":restart_count": Number(entry.restartCount ?? 0),
      ":max_restarts": Number(entry.maxRestarts ?? 0),
      ":duration_ms": Number(entry.durationMs ?? 0),
      ":total_events": Number(entry.totalEvents ?? 0),
      ":tool_calls": Number(entry.toolCalls ?? 0),
      ":solver_eval_run_id": entry.solverEvalRunId ?? null,
      ":solver_eval_report_path": entry.solverEvalReportPath ?? null,
      ":details_json": JSON.stringify(entry.details ?? {}),
      ":created_at": entry.createdAt ?? now,
      ":updated_at": now,
    });
}
|
||||
|
||||
/**
 * List headless runs, newest first.
 *
 * @param {number} [limit=20] - Clamped to [1, 100]; non-numeric input falls
 *   back to 20.
 * @returns {Array<object>} Mapped run records.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function listHeadlessRuns(limit = 20) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const rowCap = Math.max(1, Math.min(100, Number(limit) || 20));
  const rows = currentDb
    .prepare(`SELECT run_id, command, status, exit_code, timed_out,
      interrupted, restart_count, max_restarts, duration_ms,
      total_events, tool_calls, solver_eval_run_id,
      solver_eval_report_path, details_json, created_at, updated_at
      FROM headless_runs
      ORDER BY created_at DESC, run_id DESC
      LIMIT :limit`)
    .all({ ":limit": rowCap });
  return rows.map(headlessRunFromRow);
}
|
||||
|
||||
/**
 * Record one routing outcome: increments success_count or fail_count for the
 * (pattern, tier) pair, inserting the row on first sight.
 *
 * Unlike most functions in this module, the adapter is passed in explicitly.
 *
 * @param {object} db - Database adapter.
 * @param {string} pattern
 * @param {string} tier
 * @param {boolean} success - true bumps success_count, false bumps fail_count.
 */
export function upsertRoutingOutcome(db, pattern, tier, success) {
  const successDelta = success ? 1 : 0;
  const failDelta = success ? 0 : 1;
  const stmt = db.prepare(
    `INSERT INTO routing_history (pattern, tier, success_count, fail_count, updated_at)
    VALUES (:pattern, :tier, :success_count, :fail_count, :updated_at)
    ON CONFLICT(pattern, tier) DO UPDATE SET
      success_count = success_count + excluded.success_count,
      fail_count = fail_count + excluded.fail_count,
      updated_at = excluded.updated_at`,
  );
  stmt.run({
    ":pattern": pattern,
    ":tier": tier,
    ":success_count": successDelta,
    ":fail_count": failDelta,
    ":updated_at": new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Fetch every routing_history row (all patterns and tiers).
 *
 * @param {object} db - Database adapter.
 * @returns {Array<object>} Raw snake_case rows.
 */
export function getAllRoutingHistory(db) {
  const stmt = db.prepare(
    "SELECT pattern, tier, success_count, fail_count, updated_at FROM routing_history",
  );
  return stmt.all();
}
|
||||
|
||||
/**
 * Fetch the per-tier success/fail counters for one routing pattern.
 *
 * @param {object} db - Database adapter.
 * @param {string} pattern
 * @returns {Array<object>} Raw snake_case rows (tier, success_count, fail_count).
 */
export function getRoutingHistoryForPattern(db, pattern) {
  const stmt = db.prepare(
    "SELECT tier, success_count, fail_count FROM routing_history WHERE pattern = ?",
  );
  return stmt.all(pattern);
}
|
||||
|
||||
/**
 * Append a routing feedback record, timestamped at call time.
 *
 * @param {object} db - Database adapter.
 * @param {string} pattern
 * @param {string} tier
 * @param {string} feedback
 */
export function insertRoutingFeedback(db, pattern, tier, feedback) {
  const stmt = db.prepare(
    `INSERT INTO routing_feedback (pattern, tier, feedback, recorded_at)
    VALUES (:pattern, :tier, :feedback, :recorded_at)`,
  );
  stmt.run({
    ":pattern": pattern,
    ":tier": tier,
    ":feedback": feedback,
    ":recorded_at": new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Delete all routing history and routing feedback rows.
 *
 * @param {object} db - Database adapter.
 */
export function clearRoutingHistory(db) {
  for (const table of ["routing_history", "routing_feedback"]) {
    db.prepare(`DELETE FROM ${table}`).run();
  }
}
|
||||
|
||||
/**
 * Record a completed triage run; a duplicate id is silently ignored
 * (ON CONFLICT DO NOTHING).
 *
 * @param {string} id
 * @param {?string} sourceFile - Source file the run was triaged from.
 * @param {string} [createdAt] - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageRun(id, sourceFile, createdAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO triage_runs (id, source_file, status, created_at)
    VALUES (:id, :source_file, 'complete', :created_at)
    ON CONFLICT(id) DO NOTHING`,
  );
  stmt.run({
    ":id": id,
    ":source_file": sourceFile ?? null,
    ":created_at": createdAt ?? new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Insert a pending triage-eval row; a duplicate id is silently ignored
 * (ON CONFLICT DO NOTHING). Status always starts as 'pending'.
 *
 * @param {string} id
 * @param {string} runId - Parent triage_runs id.
 * @param {object} data - snake_case fields: task_input, expected_behavior,
 *   evidence, failure_mode.
 * @param {string} [createdAt] - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageEval(id, runId, data, createdAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(
      `INSERT INTO triage_evals (id, run_id, task_input, expected_behavior, evidence, failure_mode, status, created_at)
      VALUES (:id, :run_id, :task_input, :expected_behavior, :evidence, :failure_mode, 'pending', :created_at)
      ON CONFLICT(id) DO NOTHING`,
    )
    .run({
      ":id": id,
      ":run_id": runId,
      ":task_input": data.task_input ?? "",
      ":expected_behavior": data.expected_behavior ?? "",
      ":evidence": data.evidence ?? null,
      ":failure_mode": data.failure_mode ?? null,
      ":created_at": createdAt ?? new Date().toISOString(),
    });
}
|
||||
|
||||
/**
 * Insert a pending triage item; a duplicate id is silently ignored
 * (ON CONFLICT DO NOTHING). Status always starts as 'pending'.
 *
 * @param {string} id
 * @param {string} runId - Parent triage_runs id.
 * @param {string} kind
 * @param {string} content
 * @param {?string} evidence
 * @param {string} [createdAt] - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageItem(id, runId, kind, content, evidence, createdAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    `INSERT INTO triage_items (id, run_id, kind, content, evidence, status, created_at)
    VALUES (:id, :run_id, :kind, :content, :evidence, 'pending', :created_at)
    ON CONFLICT(id) DO NOTHING`,
  );
  stmt.run({
    ":id": id,
    ":run_id": runId,
    ":kind": kind,
    ":content": content,
    ":evidence": evidence ?? null,
    ":created_at": createdAt ?? new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Insert a pending triage skill; a duplicate id is silently ignored
 * (ON CONFLICT DO NOTHING). Status always starts as 'pending'.
 *
 * Field aliases: name comes from data.title falling back to data.name;
 * trigger from data.trigger_pattern falling back to data.trigger. The full
 * input object is preserved verbatim in raw_json.
 *
 * @param {string} id
 * @param {string} runId - Parent triage_runs id.
 * @param {object} data - Skill payload (title/name, description,
 *   trigger_pattern/trigger, plus anything else — kept in raw_json).
 * @param {string} [createdAt] - ISO timestamp; defaults to now.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTriageSkill(id, runId, data, createdAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(
      `INSERT INTO triage_skills (id, run_id, name, description, trigger, raw_json, status, created_at)
      VALUES (:id, :run_id, :name, :description, :trigger, :raw_json, 'pending', :created_at)
      ON CONFLICT(id) DO NOTHING`,
    )
    .run({
      ":id": id,
      ":run_id": runId,
      ":name": data.title ?? data.name ?? null,
      ":description": data.description ?? null,
      ":trigger": data.trigger_pattern ?? data.trigger ?? null,
      ":raw_json": JSON.stringify(data),
      ":created_at": createdAt ?? new Date().toISOString(),
    });
}
|
||||
329
src/resources/extensions/sf/sf-db/sf-db-memory.js
Normal file
329
src/resources/extensions/sf/sf-db/sf-db-memory.js
Normal file
|
|
@ -0,0 +1,329 @@
|
|||
import { _getAdapter, intBool, parseJsonObject } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * List rows from the active_memories view, newest first, optionally
 * filtered by category.
 *
 * @param {{category?: string, limit?: number}} [options]
 * @returns {Array<{id, category, content, confidence, sourceUnitId,
 *   tags: Array, createdAt, updatedAt}>} Mapped records; [] when no DB is
 *   open.
 */
export function getActiveMemories({ category, limit = 200 } = {}) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  // tags is stored as a JSON array string; malformed data yields [].
  const safeParseTags = (raw) => {
    try {
      return JSON.parse(raw ?? "[]");
    } catch {
      return [];
    }
  };
  let rows;
  if (category) {
    rows = currentDb
      .prepare(
        "SELECT * FROM active_memories WHERE category = ? ORDER BY updated_at DESC LIMIT ?",
      )
      .all(category, limit);
  } else {
    rows = currentDb
      .prepare(
        "SELECT * FROM active_memories ORDER BY updated_at DESC LIMIT ?",
      )
      .all(limit);
  }
  return rows.map((row) => ({
    id: row.id,
    category: row.category,
    content: row.content,
    confidence: row.confidence,
    sourceUnitId: row.source_unit_id,
    tags: safeParseTags(row.tags),
    createdAt: row.created_at,
    updatedAt: row.updated_at,
  }));
}
|
||||
|
||||
/**
 * Insert one memory row. tags are serialised to a JSON array string;
 * all other fields are written verbatim from args.
 *
 * @param {object} args - id, category, content, confidence, sourceUnitType,
 *   sourceUnitId, createdAt, updatedAt, tags.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertMemoryRow(args) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT INTO memories (id, category, content, confidence, source_unit_type, source_unit_id, created_at, updated_at, tags)
      VALUES (:id, :category, :content, :confidence, :source_unit_type, :source_unit_id, :created_at, :updated_at, :tags)`)
    .run({
      ":id": args.id,
      ":category": args.category,
      ":content": args.content,
      ":confidence": args.confidence,
      ":source_unit_type": args.sourceUnitType,
      ":source_unit_id": args.sourceUnitId,
      ":created_at": args.createdAt,
      ":updated_at": args.updatedAt,
      ":tags": JSON.stringify(args.tags ?? []),
    });
}
|
||||
|
||||
/**
 * Replace a placeholder memory id with its real id (used when the final id
 * becomes known only after the row was first written).
 *
 * @param {string} placeholderId - Temporary id currently stored.
 * @param {string} realId - Final id to write in its place.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function rewriteMemoryId(placeholderId, realId) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    "UPDATE memories SET id = :real_id WHERE id = :placeholder",
  );
  stmt.run({ ":real_id": realId, ":placeholder": placeholderId });
}
|
||||
|
||||
/**
 * Update a memory's content (and optionally its confidence), bumping
 * updated_at. Confidence is only touched when a non-nullish value is given.
 *
 * @param {string} id
 * @param {string} content
 * @param {?number} confidence - Pass null/undefined to leave unchanged.
 * @param {string} updatedAt - ISO timestamp to store.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateMemoryContentRow(id, content, confidence, updatedAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Guard clause: without a confidence, only content + timestamp change.
  if (confidence == null) {
    currentDb
      .prepare(
        "UPDATE memories SET content = :content, updated_at = :updated_at WHERE id = :id",
      )
      .run({ ":content": content, ":updated_at": updatedAt, ":id": id });
    return;
  }
  currentDb
    .prepare(
      "UPDATE memories SET content = :content, confidence = :confidence, updated_at = :updated_at WHERE id = :id",
    )
    .run({
      ":content": content,
      ":confidence": confidence,
      ":updated_at": updatedAt,
      ":id": id,
    });
}
|
||||
|
||||
/**
 * Bump a memory's hit_count by one and refresh its updated_at timestamp.
 *
 * @param {string} id
 * @param {string} updatedAt - ISO timestamp to store.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function incrementMemoryHitCount(id, updatedAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    "UPDATE memories SET hit_count = hit_count + 1, updated_at = :updated_at WHERE id = :id",
  );
  stmt.run({ ":updated_at": updatedAt, ":id": id });
}
|
||||
|
||||
/**
 * Mark one memory as superseded by another (sets superseded_by and bumps
 * updated_at on the old row).
 *
 * @param {string} oldId - Memory being superseded.
 * @param {string} newId - Memory that replaces it.
 * @param {string} updatedAt - ISO timestamp to store.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function supersedeMemoryRow(oldId, newId, updatedAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(
    "UPDATE memories SET superseded_by = :new_id, updated_at = :updated_at WHERE id = :old_id",
  );
  stmt.run({ ":new_id": newId, ":updated_at": updatedAt, ":old_id": oldId });
}
|
||||
|
||||
/**
 * Record that a memory unit has been processed; re-processing the same
 * unit_key is a silent no-op (INSERT OR IGNORE).
 *
 * @param {string} unitKey
 * @param {string} activityFile - Activity file the unit came from.
 * @param {string} processedAt - ISO timestamp.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function markMemoryUnitProcessed(unitKey, activityFile, processedAt) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = currentDb.prepare(`INSERT OR IGNORE INTO memory_processed_units (unit_key, activity_file, processed_at)
      VALUES (:key, :file, :at)`);
  stmt.run({ ":key": unitKey, ":file": activityFile, ":at": processedAt });
}
|
||||
|
||||
/**
 * Apply confidence decay to live (non-superseded) memories not updated
 * since `cutoffTs`: subtracts 0.1 from confidence, flooring at 0.1. Rows
 * already at or below the floor are left alone so decay converges.
 *
 * @param {string} cutoffTs - ISO timestamp; rows with older updated_at decay.
 * @param {string} now - ISO timestamp written to updated_at.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function decayMemoriesBefore(cutoffTs, now) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`UPDATE memories
      SET confidence = MAX(0.1, confidence - 0.1), updated_at = :now
      WHERE superseded_by IS NULL AND updated_at < :cutoff AND confidence > 0.1`)
    .run({ ":now": now, ":cutoff": cutoffTs });
}
|
||||
|
||||
/**
 * Expire stale live memories by marking them superseded_by = 'ttl-expired'.
 *
 * A memory expires when it was never hit (hit_count = 0) and is older than
 * `unstartedTtlDays`, or unconditionally once older than `maxTtlDays`.
 * Returns 0 silently when no database is open.
 *
 * @param {number} [unstartedTtlDays=28]
 * @param {number} [maxTtlDays=90]
 * @returns {number} Count of rows expired.
 */
export function expireStaleMemories(unstartedTtlDays = 28, maxTtlDays = 90) {
  const currentDb = _getAdapter();
  if (!currentDb) return 0;
  // ISO timestamp `days` days in the past (86_400_000 ms per day).
  const isoDaysAgo = (days) =>
    new Date(Date.now() - days * 86_400_000).toISOString();
  const now = new Date().toISOString();
  const result = currentDb
    .prepare(`UPDATE memories SET superseded_by = 'ttl-expired', updated_at = :now
      WHERE superseded_by IS NULL
      AND (
        (hit_count = 0 AND updated_at < :cutoff_unstarted)
        OR updated_at < :cutoff_max
      )`)
    .run({
      ":now": now,
      ":cutoff_unstarted": isoDaysAgo(unstartedTtlDays),
      ":cutoff_max": isoDaysAgo(maxTtlDays),
    });
  return result.changes ?? 0;
}
|
||||
|
||||
/**
 * Mark the `limit` lowest-ranked live memories as superseded_by
 * 'CAP_EXCEEDED' (used to enforce a cap on total live memories).
 *
 * Rank is confidence boosted by usage: confidence * (1 + 0.1 * hit_count),
 * ascending — so the least-trusted, least-used rows are evicted first.
 *
 * @param {number} limit - How many rows to supersede.
 * @param {string} now - ISO timestamp written to updated_at.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function supersedeLowestRankedMemories(limit, now) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`UPDATE memories SET superseded_by = 'CAP_EXCEEDED', updated_at = :now
      WHERE id IN (
        SELECT id FROM memories
        WHERE superseded_by IS NULL
        ORDER BY (confidence * (1.0 + hit_count * 0.1)) ASC
        LIMIT :limit
      )`)
    .run({ ":now": now, ":limit": limit });
}
|
||||
|
||||
/**
 * Insert an imported memory source; a duplicate id is silently ignored
 * (INSERT OR IGNORE). scope defaults to "project"; tags are serialised to a
 * JSON array string.
 *
 * @param {object} args - id, kind, uri, title, content, contentHash,
 *   importedAt, scope, tags.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertMemorySourceRow(args) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT OR IGNORE INTO memory_sources (id, kind, uri, title, content, content_hash, imported_at, scope, tags)
      VALUES (:id, :kind, :uri, :title, :content, :content_hash, :imported_at, :scope, :tags)`)
    .run({
      ":id": args.id,
      ":kind": args.kind,
      ":uri": args.uri,
      ":title": args.title,
      ":content": args.content,
      ":content_hash": args.contentHash,
      ":imported_at": args.importedAt,
      ":scope": args.scope ?? "project",
      ":tags": JSON.stringify(args.tags ?? []),
    });
}
|
||||
|
||||
/**
 * Delete a memory source by id.
 *
 * @param {string} id
 * @returns {boolean} true when a row was actually deleted.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteMemorySourceRow(id) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const result = currentDb
    .prepare("DELETE FROM memory_sources WHERE id = :id")
    .run({ ":id": id });
  const deleted = result?.changes ?? 0;
  return deleted > 0;
}
|
||||
|
||||
/**
 * Append a judgment record (decision + alternatives + reasoning) for a unit.
 * Best-effort: silently returns when no DB is open, and any insert error is
 * swallowed — judgment logging must never break the caller.
 *
 * @param {object} entry - unitId, decision, alternatives (array),
 *   reasoning, confidence ("low"/"medium"/"high"-style string, defaults to
 *   "medium"), ts (ISO timestamp, defaults to now).
 */
export function insertJudgment(entry) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  try {
    currentDb
      .prepare(`INSERT INTO judgments (unit_id, decision, alternatives_json, reasoning, confidence, ts)
        VALUES (:unit_id, :decision, :alternatives_json, :reasoning, :confidence, :ts)`)
      .run({
        ":unit_id": entry.unitId ?? "",
        ":decision": entry.decision ?? "",
        ":alternatives_json": JSON.stringify(entry.alternatives ?? []),
        ":reasoning": entry.reasoning ?? "",
        ":confidence": entry.confidence ?? "medium",
        ":ts": entry.ts ?? new Date().toISOString(),
      });
  } catch {
    // Judgment logging is best-effort
  }
}
|
||||
|
||||
/**
 * Fetch judgments whose unit_id starts with the given prefix, newest first.
 * Returns [] when no database is open or the query fails (best-effort read).
 * @param {string} unitIdPrefix - Prefix matched with SQL LIKE.
 * @param {number} [limit=1000] - Maximum rows returned.
 * @returns {Array<object>} Parsed judgment records.
 */
export function getJudgmentsForUnit(unitIdPrefix, limit = 1000) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  try {
    // NOTE(review): unitIdPrefix is interpolated into LIKE unescaped, so
    // '%' or '_' in a prefix act as wildcards — confirm callers never pass them.
    const rows = currentDb
      .prepare(
        `SELECT id, unit_id AS unitId, decision, alternatives_json AS alternativesJson, reasoning, confidence, ts
FROM judgments
WHERE unit_id LIKE :prefix
ORDER BY ts DESC
LIMIT :limit`,
      )
      .all({
        ":prefix": `${unitIdPrefix}%`,
        ":limit": limit,
      });
    return rows.map((r) => ({
      id: r.id,
      unitId: r.unitId,
      decision: r.decision,
      // Stored as JSON text; falls back to [] on parse failure.
      alternatives: parseJsonObject(r.alternativesJson, []),
      reasoning: r.reasoning,
      confidence: r.confidence,
      ts: r.ts,
    }));
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Insert one retrieval-evidence record (a log of a retrieval/search call and
 * its outcome).
 * @param {object} args - Retrieval metadata; most fields are defaulted:
 *   backend (required), sourceKind -> "code", query/strategy/scope/projectRoot
 *   -> "", gitHead/gitBranch/cachePath/error -> null, freshness -> "unknown",
 *   status -> "ok", hitCount/elapsedMs -> 0, result -> {},
 *   recordedAt -> now (ISO).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertRetrievalEvidence(args) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = args.recordedAt ?? new Date().toISOString();
  currentDb
    .prepare(`INSERT INTO retrieval_evidence (
backend, source_kind, query, strategy, scope, project_root,
git_head, git_branch, worktree_dirty, freshness, status,
hit_count, elapsed_ms, cache_path, error, result_json, recorded_at
) VALUES (
:backend, :source_kind, :query, :strategy, :scope, :project_root,
:git_head, :git_branch, :worktree_dirty, :freshness, :status,
:hit_count, :elapsed_ms, :cache_path, :error, :result_json, :recorded_at
)`)
    .run({
      ":backend": args.backend,
      ":source_kind": args.sourceKind ?? "code",
      ":query": args.query ?? "",
      ":strategy": args.strategy ?? "",
      ":scope": args.scope ?? "",
      ":project_root": args.projectRoot ?? "",
      ":git_head": args.gitHead ?? null,
      ":git_branch": args.gitBranch ?? null,
      // Boolean stored as 0/1 via shared helper.
      ":worktree_dirty": intBool(args.worktreeDirty),
      ":freshness": args.freshness ?? "unknown",
      ":status": args.status ?? "ok",
      ":hit_count": args.hitCount ?? 0,
      ":elapsed_ms": args.elapsedMs ?? 0,
      ":cache_path": args.cachePath ?? null,
      ":error": args.error ?? null,
      // Full result payload serialized as JSON text.
      ":result_json": JSON.stringify(args.result ?? {}),
      ":recorded_at": now,
    });
}
|
||||
|
||||
/**
 * Read the most recent retrieval-evidence records, newest first.
 * @param {number} [limit=100] - Maximum rows returned.
 * @returns {Array<object>} Records with worktreeDirty decoded to a boolean
 *   and result parsed from resultJson; [] when no database is open.
 */
export function getRetrievalEvidence(limit = 100) {
  const db = _getAdapter();
  if (!db) return [];
  // Decode SQLite representations back into JS-friendly values.
  const toRecord = (row) => ({
    ...row,
    worktreeDirty: row.worktreeDirty === 1,
    result: parseJsonObject(row.resultJson, {}),
  });
  const stmt = db.prepare(`SELECT
id, backend, source_kind AS sourceKind, query, strategy, scope,
project_root AS projectRoot, git_head AS gitHead,
git_branch AS gitBranch, worktree_dirty AS worktreeDirty,
freshness, status, hit_count AS hitCount, elapsed_ms AS elapsedMs,
cache_path AS cachePath, error, result_json AS resultJson, recorded_at AS recordedAt
FROM retrieval_evidence
ORDER BY recorded_at DESC, id DESC
LIMIT :limit`);
  return stmt.all({ ":limit": limit }).map(toRecord);
}
|
||||
|
||||
/**
 * Insert or replace the embedding vector for a memory (one row per memory_id).
 * On conflict the model, dim, vector and updated_at columns are overwritten.
 * @param {object} args - { memoryId, model, dim, vector, updatedAt }.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertMemoryEmbedding(args) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":memory_id": args.memoryId,
    ":model": args.model,
    ":dim": args.dim,
    ":vector": args.vector,
    ":updated_at": args.updatedAt,
  };
  db
    .prepare(`INSERT INTO memory_embeddings (memory_id, model, dim, vector, updated_at)
VALUES (:memory_id, :model, :dim, :vector, :updated_at)
ON CONFLICT(memory_id) DO UPDATE SET
model = excluded.model,
dim = excluded.dim,
vector = excluded.vector,
updated_at = excluded.updated_at`)
    .run(params);
}
|
||||
|
||||
/**
 * Remove the stored embedding for a memory, if any.
 * @param {string} memoryId - Key of the memory_embeddings row.
 * @returns {boolean} true when a row was removed.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteMemoryEmbedding(memoryId) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const outcome = db
    .prepare("DELETE FROM memory_embeddings WHERE memory_id = :id")
    .run({ ":id": memoryId });
  return (outcome?.changes ?? 0) > 0;
}
|
||||
427
src/resources/extensions/sf/sf-db/sf-db-milestones.js
Normal file
427
src/resources/extensions/sf/sf-db/sf-db-milestones.js
Normal file
|
|
@ -0,0 +1,427 @@
|
|||
import { _getAdapter, hasPlanningPayload, isEmptyMilestoneSpec, parseJsonOrFallback, insertMilestoneSpecIfAbsent, rowToMilestone, parseVisionMeeting, parseProductResearch, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { existsSync, readFileSync } from 'node:fs';
|
||||
|
||||
/**
 * Insert a milestone row (idempotent on id via INSERT OR IGNORE), flattening
 * the optional planning payload into the milestone columns. When a planning
 * payload is present, a milestone-spec record is also created if absent.
 * @param {object} m - { id, title?, status?, depends_on?, planning?, sequence? }.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertMilestone(m) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT OR IGNORE INTO milestones (
id, title, status, depends_on, created_at,
vision, success_criteria, key_risks, proof_strategy,
verification_contract, verification_integration, verification_operational, verification_uat,
definition_of_done, requirement_coverage, boundary_map_markdown, vision_meeting_json, product_research_json, sequence
) VALUES (
:id, :title, :status, :depends_on, :created_at,
:vision, :success_criteria, :key_risks, :proof_strategy,
:verification_contract, :verification_integration, :verification_operational, :verification_uat,
:definition_of_done, :requirement_coverage, :boundary_map_markdown, :vision_meeting_json, :product_research_json, :sequence
)`)
    .run({
      ":id": m.id,
      ":title": m.title ?? "",
      // Default to "queued" — never auto-create milestones as "active" (#3380).
      // Callers that need "active" must pass it explicitly.
      ":status": m.status ?? "queued",
      ":depends_on": JSON.stringify(m.depends_on ?? []),
      ":created_at": new Date().toISOString(),
      ":vision": m.planning?.vision ?? "",
      // List-valued planning fields are serialized as JSON array strings.
      ":success_criteria": JSON.stringify(m.planning?.successCriteria ?? []),
      ":key_risks": JSON.stringify(m.planning?.keyRisks ?? []),
      ":proof_strategy": JSON.stringify(m.planning?.proofStrategy ?? []),
      ":verification_contract": m.planning?.verificationContract ?? "",
      ":verification_integration": m.planning?.verificationIntegration ?? "",
      ":verification_operational": m.planning?.verificationOperational ?? "",
      ":verification_uat": m.planning?.verificationUat ?? "",
      ":definition_of_done": JSON.stringify(m.planning?.definitionOfDone ?? []),
      ":requirement_coverage": m.planning?.requirementCoverage ?? "",
      ":boundary_map_markdown": m.planning?.boundaryMapMarkdown ?? "",
      // Optional meeting/research payloads: "" (not null) when absent.
      ":vision_meeting_json": m.planning?.visionMeeting
        ? JSON.stringify(m.planning.visionMeeting)
        : "",
      ":product_research_json": m.planning?.productResearch
        ? JSON.stringify(m.planning.productResearch)
        : "",
      ":sequence": m.sequence ?? 0,
    });
  // Mirror a non-empty planning payload into the milestone-spec table.
  if (hasPlanningPayload(m.planning)) {
    insertMilestoneSpecIfAbsent(m.id, m.planning ?? {});
  }
}
|
||||
|
||||
/**
 * Merge a planning payload into an existing milestone row.
 * Merge semantics: for title/status an empty string means "keep existing"
 * (NULLIF + COALESCE); for every other field a null parameter means "keep
 * existing" (COALESCE), so only fields present in `planning` are overwritten.
 * Also creates the milestone-spec record if absent.
 * @param {string} milestoneId - Target milestone id.
 * @param {object} planning - Partial planning payload.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertMilestonePlanning(milestoneId, planning) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  insertMilestoneSpecIfAbsent(milestoneId, planning);
  currentDb
    .prepare(`UPDATE milestones SET
title = COALESCE(NULLIF(:title, ''), title),
status = COALESCE(NULLIF(:status, ''), status),
vision = COALESCE(:vision, vision),
success_criteria = COALESCE(:success_criteria, success_criteria),
key_risks = COALESCE(:key_risks, key_risks),
proof_strategy = COALESCE(:proof_strategy, proof_strategy),
verification_contract = COALESCE(:verification_contract, verification_contract),
verification_integration = COALESCE(:verification_integration, verification_integration),
verification_operational = COALESCE(:verification_operational, verification_operational),
verification_uat = COALESCE(:verification_uat, verification_uat),
definition_of_done = COALESCE(:definition_of_done, definition_of_done),
requirement_coverage = COALESCE(:requirement_coverage, requirement_coverage),
boundary_map_markdown = COALESCE(:boundary_map_markdown, boundary_map_markdown),
vision_meeting_json = COALESCE(:vision_meeting_json, vision_meeting_json),
product_research_json = COALESCE(:product_research_json, product_research_json)
WHERE id = :id`)
    .run({
      ":id": milestoneId,
      ":title": planning.title ?? "",
      ":status": planning.status ?? "",
      ":vision": planning.vision ?? null,
      // List fields: serialize when provided, null (= keep existing) otherwise.
      ":success_criteria": planning.successCriteria
        ? JSON.stringify(planning.successCriteria)
        : null,
      ":key_risks": planning.keyRisks
        ? JSON.stringify(planning.keyRisks)
        : null,
      ":proof_strategy": planning.proofStrategy
        ? JSON.stringify(planning.proofStrategy)
        : null,
      ":verification_contract": planning.verificationContract ?? null,
      ":verification_integration": planning.verificationIntegration ?? null,
      ":verification_operational": planning.verificationOperational ?? null,
      ":verification_uat": planning.verificationUat ?? null,
      ":definition_of_done": planning.definitionOfDone
        ? JSON.stringify(planning.definitionOfDone)
        : null,
      ":requirement_coverage": planning.requirementCoverage ?? null,
      ":boundary_map_markdown": planning.boundaryMapMarkdown ?? null,
      ":vision_meeting_json": planning.visionMeeting
        ? JSON.stringify(planning.visionMeeting)
        : null,
      ":product_research_json": planning.productResearch
        ? JSON.stringify(planning.productResearch)
        : null,
    });
}
|
||||
|
||||
/**
 * List every milestone, explicitly sequenced ones first (by sequence), then
 * unsequenced ones by id.
 * @returns {Array<object>} Milestone records; [] when no database is open.
 */
export function getAllMilestones() {
  const db = _getAdapter();
  if (!db) return [];
  return db
    .prepare(
      "SELECT * FROM milestones ORDER BY CASE WHEN sequence > 0 THEN 0 ELSE 1 END, sequence, id",
    )
    .all()
    .map((row) => rowToMilestone(row));
}
|
||||
|
||||
/**
 * Fetch a single milestone by id.
 * @param {string} id - Milestone id.
 * @returns {object|null} The milestone record, or null when missing or when
 *   no database is open.
 */
export function getMilestone(id) {
  const db = _getAdapter();
  if (!db) return null;
  const row = db
    .prepare("SELECT * FROM milestones WHERE id = :id")
    .get({ ":id": id });
  return row ? rowToMilestone(row) : null;
}
|
||||
|
||||
/**
 * Set a milestone's status and completion timestamp.
 * @param {string} milestoneId - Milestone id.
 * @param {string} status - New status value.
 * @param {string} [completedAt] - ISO timestamp; stored as NULL when absent.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateMilestoneStatus(milestoneId, status, completedAt) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":status": status,
    ":completed_at": completedAt ?? null,
    ":id": milestoneId,
  };
  const stmt = db.prepare(
    `UPDATE milestones SET status = :status, completed_at = :completed_at WHERE id = :id`,
  );
  stmt.run(params);
}
|
||||
|
||||
/**
 * Rewrite milestone queue positions: the milestone at index i in `order`
 * receives sequence i+1. Runs inside one transaction.
 * @param {string[]} order - Milestone ids in desired queue order.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateMilestoneQueueOrder(order) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  transaction(() => {
    const stmt = db.prepare(
      "UPDATE milestones SET sequence = :sequence WHERE id = :id",
    );
    order.forEach((milestoneId, index) => {
      stmt.run({ ":sequence": index + 1, ":id": milestoneId });
    });
  });
}
|
||||
|
||||
/**
 * Return the first milestone that is neither complete nor parked, honoring
 * explicit sequencing (sequence > 0 first, then by id).
 * @returns {object|null} The active milestone record, or null when none
 *   exists or no database is open.
 */
export function getActiveMilestoneFromDb() {
  const db = _getAdapter();
  if (!db) return null;
  const row = db
    .prepare(
      "SELECT * FROM milestones WHERE status NOT IN ('complete', 'parked') ORDER BY CASE WHEN sequence > 0 THEN 0 ELSE 1 END, sequence, id LIMIT 1",
    )
    .get();
  return row ? rowToMilestone(row) : null;
}
|
||||
|
||||
/**
 * Lightweight variant of getActiveMilestoneFromDb: returns only the id and
 * status of the first not-complete/not-parked milestone.
 * NOTE(review): this orders by id only, while getActiveMilestoneFromDb orders
 * sequence-first — the two can disagree once sequences are assigned; confirm
 * whether that divergence is intentional.
 * @returns {{id: string, status: string}|null} null when none exists or no
 *   database is open.
 */
export function getActiveMilestoneIdFromDb() {
  const currentDb = _getAdapter();
  if (!currentDb) return null;
  const row = currentDb
    .prepare(
      "SELECT id, status FROM milestones WHERE status NOT IN ('complete', 'parked') ORDER BY id LIMIT 1",
    )
    .get();
  if (!row) return null;
  return { id: row["id"], status: row["status"] };
}
|
||||
|
||||
/**
 * Delete a milestone and every dependent record, in one transaction.
 * Child tables are cleared before the milestone row itself to preserve
 * foreign-key-like consistency.
 * @param {string} milestoneId - Milestone id to remove.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteMilestone(milestoneId) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Tables keyed by milestone_id, in deletion order.
  const childTables = [
    "verification_evidence",
    "quality_gates",
    "tasks",
    "slice_dependencies",
    "slices",
    "replan_history",
    "assessments",
    "artifacts",
  ];
  transaction(() => {
    for (const table of childTables) {
      db.prepare(`DELETE FROM ${table} WHERE milestone_id = :mid`).run({
        ":mid": milestoneId,
      });
    }
    db.prepare(`DELETE FROM milestones WHERE id = :mid`).run({
      ":mid": milestoneId,
    });
  });
}
|
||||
|
||||
/**
 * Import a legacy milestone/slice/task hierarchy in one transaction.
 * For each id in clearMilestoneIds the existing tasks, slices and milestone
 * rows are deleted first, then the provided records are inserted. No-ops when
 * clearMilestoneIds is empty.
 *
 * Fix: the prepared-statement local was named `insertMilestone`, shadowing
 * this module's exported insertMilestone() function; renamed to
 * `insertMilestoneStmt` for clarity and consistency with `insertSliceStmt`
 * and `insertTaskStmt`.
 *
 * @param {object} payload - { milestones, slices, tasks, clearMilestoneIds,
 *   createdAt }.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function bulkInsertLegacyHierarchy(payload) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const db = currentDb;
  const { milestones, slices, tasks, clearMilestoneIds, createdAt } = payload;
  if (clearMilestoneIds.length === 0) return;
  const placeholders = clearMilestoneIds.map(() => "?").join(",");
  transaction(() => {
    // Clear children before parents.
    db.prepare(`DELETE FROM tasks WHERE milestone_id IN (${placeholders})`).run(
      ...clearMilestoneIds,
    );
    db.prepare(
      `DELETE FROM slices WHERE milestone_id IN (${placeholders})`,
    ).run(...clearMilestoneIds);
    db.prepare(`DELETE FROM milestones WHERE id IN (${placeholders})`).run(
      ...clearMilestoneIds,
    );
    const insertMilestoneStmt = db.prepare(
      "INSERT INTO milestones (id, title, status, created_at) VALUES (?, ?, ?, ?)",
    );
    for (const m of milestones) {
      insertMilestoneStmt.run(m.id, m.title, m.status, createdAt);
    }
    const insertSliceStmt = db.prepare(
      "INSERT INTO slices (id, milestone_id, title, status, risk, depends, sequence, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
    );
    for (const s of slices) {
      // Legacy slices carry no dependency info: depends is always "[]".
      insertSliceStmt.run(
        s.id,
        s.milestoneId,
        s.title,
        s.status,
        s.risk,
        "[]",
        s.sequence,
        createdAt,
      );
    }
    const insertTaskStmt = db.prepare(
      "INSERT INTO tasks (id, slice_id, milestone_id, title, description, status, estimate, files, sequence) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
    );
    for (const t of tasks) {
      // Legacy tasks have no description/estimate and an empty files list.
      insertTaskStmt.run(
        t.id,
        t.sliceId,
        t.milestoneId,
        t.title,
        "",
        t.status,
        "",
        "[]",
        t.sequence,
      );
    }
  });
}
|
||||
|
||||
/**
 * Wipe the entire engine hierarchy (tasks, slices, milestones) in one
 * transaction, children first.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function clearEngineHierarchy() {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  transaction(() => {
    for (const table of ["tasks", "slices", "milestones"]) {
      db.exec(`DELETE FROM ${table}`);
    }
  });
}
|
||||
|
||||
/**
 * Atomically replace the engine state (milestones, slices, tasks, decisions,
 * verification evidence) with the contents of a backup manifest.
 * All five tables are cleared first, then repopulated with positional
 * prepared statements — the argument order in each .run() call must match
 * the column list of its INSERT exactly.
 * @param {object} manifest - { milestones, slices, tasks, decisions,
 *   verification_evidence } arrays of snake_case row objects.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function restoreManifest(manifest) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const db = currentDb;
  transaction(() => {
    // Clear engine tables (order matters for foreign-key-like consistency)
    db.exec("DELETE FROM verification_evidence");
    db.exec("DELETE FROM tasks");
    db.exec("DELETE FROM slices");
    db.exec("DELETE FROM milestones");
    db.exec("DELETE FROM decisions WHERE 1=1");
    // Restore milestones
    const msStmt =
      db.prepare(`INSERT INTO milestones (id, title, status, depends_on, created_at, completed_at,
vision, success_criteria, key_risks, proof_strategy,
verification_contract, verification_integration, verification_operational, verification_uat,
definition_of_done, requirement_coverage, boundary_map_markdown, vision_meeting_json, product_research_json)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    for (const m of manifest.milestones) {
      msStmt.run(
        m.id,
        m.title,
        m.status,
        JSON.stringify(m.depends_on),
        m.created_at,
        m.completed_at,
        m.vision,
        JSON.stringify(m.success_criteria),
        JSON.stringify(m.key_risks),
        JSON.stringify(m.proof_strategy),
        m.verification_contract,
        m.verification_integration,
        m.verification_operational,
        m.verification_uat,
        JSON.stringify(m.definition_of_done),
        m.requirement_coverage,
        m.boundary_map_markdown,
        // Optional payloads stored as JSON text, "" when absent.
        m.vision_meeting ? JSON.stringify(m.vision_meeting) : "",
        m.product_research ? JSON.stringify(m.product_research) : "",
      );
    }
    // Restore slices
    const slStmt =
      db.prepare(`INSERT INTO slices (milestone_id, id, title, status, risk, depends, demo,
created_at, completed_at, full_summary_md, full_uat_md,
goal, success_criteria, proof_level, integration_closure, observability_impact,
adversarial_partner, adversarial_combatant, adversarial_architect, planning_meeting_json,
sequence, replan_triggered_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    for (const s of manifest.slices) {
      slStmt.run(
        s.milestone_id,
        s.id,
        s.title,
        s.status,
        s.risk,
        JSON.stringify(s.depends),
        s.demo,
        s.created_at,
        s.completed_at,
        s.full_summary_md,
        s.full_uat_md,
        s.goal,
        s.success_criteria,
        s.proof_level,
        s.integration_closure,
        s.observability_impact,
        // Adversarial fields may be missing in older manifests.
        s.adversarial_partner ?? "",
        s.adversarial_combatant ?? "",
        s.adversarial_architect ?? "",
        s.planning_meeting ? JSON.stringify(s.planning_meeting) : "",
        s.sequence,
        s.replan_triggered_at,
      );
    }
    // Restore tasks
    const tkStmt =
      db.prepare(`INSERT INTO tasks (milestone_id, slice_id, id, title, status,
one_liner, narrative, verification_result, duration, completed_at,
blocker_discovered, deviations, known_issues, key_files, key_decisions,
full_summary_md, description, estimate, files, verify,
inputs, expected_output, observability_impact, full_plan_md, sequence)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    for (const t of manifest.tasks) {
      tkStmt.run(
        t.milestone_id,
        t.slice_id,
        t.id,
        t.title,
        t.status,
        t.one_liner,
        t.narrative,
        t.verification_result,
        t.duration,
        t.completed_at,
        // Boolean stored as 0/1.
        t.blocker_discovered ? 1 : 0,
        t.deviations,
        t.known_issues,
        JSON.stringify(t.key_files),
        JSON.stringify(t.key_decisions),
        t.full_summary_md,
        t.description,
        t.estimate,
        JSON.stringify(t.files),
        t.verify,
        JSON.stringify(t.inputs),
        JSON.stringify(t.expected_output),
        t.observability_impact,
        t.full_plan_md,
        t.sequence,
      );
    }
    // Restore decisions
    const dcStmt =
      db.prepare(`INSERT INTO decisions (seq, id, when_context, scope, decision, choice, rationale, revisable, made_by, superseded_by)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`);
    for (const d of manifest.decisions) {
      dcStmt.run(
        d.seq,
        d.id,
        d.when_context,
        d.scope,
        d.decision,
        d.choice,
        d.rationale,
        d.revisable,
        d.made_by,
        d.superseded_by,
      );
    }
    // Restore verification evidence
    const evStmt =
      db.prepare(`INSERT INTO verification_evidence (task_id, slice_id, milestone_id, command, exit_code, verdict, duration_ms, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`);
    for (const e of manifest.verification_evidence) {
      evStmt.run(
        e.task_id,
        e.slice_id,
        e.milestone_id,
        e.command,
        e.exit_code,
        e.verdict,
        e.duration_ms,
        e.created_at,
      );
    }
  });
}
|
||||
|
||||
49
src/resources/extensions/sf/sf-db/sf-db-mode-state.js
Normal file
49
src/resources/extensions/sf/sf-db/sf-db-mode-state.js
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import { _getAdapter } from './sf-db-core.js';
|
||||
|
||||
/**
 * Load the singleton session-mode row (id = 1), applying defaults for any
 * missing columns.
 * @returns {object|null} { workMode, runControl, permissionProfile,
 *   modelMode, surface, updatedAt }, or null when no database is open, no
 *   row exists, or the read fails.
 */
export function loadSessionModeState() {
  const db = _getAdapter();
  if (!db) return null;
  try {
    const row = db
      .prepare("SELECT * FROM session_mode_state WHERE id = 1")
      .get();
    if (!row) return null;
    const state = {
      workMode: row["work_mode"] ?? "chat",
      runControl: row["run_control"] ?? "manual",
      permissionProfile: row["permission_profile"] ?? "restricted",
      modelMode: row["model_mode"] ?? "smart",
      surface: row["surface"] ?? "tui",
      updatedAt: row["updated_at"] ?? null,
    };
    return state;
  } catch {
    // Best-effort read: treat any failure as "no saved state".
    return null;
  }
}
|
||||
|
||||
/**
 * Upsert the singleton session-mode row (id = 1).
 * @param {object} mode - { workMode, runControl, permissionProfile,
 *   modelMode, surface?, updatedAt? }; surface defaults to "tui", updatedAt
 *   to the current time.
 * @returns {boolean} true on success, false when no database is open.
 */
export function saveSessionModeState(mode) {
  const db = _getAdapter();
  if (!db) return false;
  const params = {
    ":workMode": mode.workMode,
    ":runControl": mode.runControl,
    ":permissionProfile": mode.permissionProfile,
    ":modelMode": mode.modelMode,
    ":surface": mode.surface ?? "tui",
    ":updatedAt": mode.updatedAt ?? new Date().toISOString(),
  };
  const stmt = db.prepare(`
INSERT INTO session_mode_state (id, work_mode, run_control, permission_profile, model_mode, surface, updated_at)
VALUES (1, :workMode, :runControl, :permissionProfile, :modelMode, :surface, :updatedAt)
ON CONFLICT(id) DO UPDATE SET
work_mode = excluded.work_mode,
run_control = excluded.run_control,
permission_profile = excluded.permission_profile,
model_mode = excluded.model_mode,
surface = excluded.surface,
updated_at = excluded.updated_at
`);
  stmt.run(params);
  return true;
}
|
||||
|
||||
347
src/resources/extensions/sf/sf-db/sf-db-profile.js
Normal file
347
src/resources/extensions/sf/sf-db/sf-db-profile.js
Normal file
|
|
@ -0,0 +1,347 @@
|
|||
import { _getAdapter, normalizeScheduleScope, scheduleEntryFromRow, asStringOrNull, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Persist a repo profile snapshot plus per-file observations, atomically.
 * The profile row is replaced wholesale (INSERT OR REPLACE); each changed
 * file is upserted into repo_file_observations with merge rules that
 * preserve the earliest sighting and any adoption already recorded.
 * @param {object} profile - { profileId, projectHash, projectRoot, createdAt,
 *   git: { head, branch, remoteHash, dirty, changedFiles[] } }.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function recordRepoProfile(profile) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  transaction(() => {
    currentDb
      .prepare(`INSERT OR REPLACE INTO repo_profiles (
profile_id, project_hash, project_root, head, branch, remote_hash,
dirty, profile_json, created_at
) VALUES (
:profile_id, :project_hash, :project_root, :head, :branch, :remote_hash,
:dirty, :profile_json, :created_at
)`)
      .run({
        ":profile_id": profile.profileId,
        ":project_hash": profile.projectHash,
        ":project_root": profile.projectRoot,
        ":head": profile.git.head,
        ":branch": profile.git.branch,
        ":remote_hash": profile.git.remoteHash,
        // Boolean stored as 0/1.
        ":dirty": profile.git.dirty ? 1 : 0,
        // Full profile kept as JSON alongside the indexed columns.
        ":profile_json": JSON.stringify(profile),
        ":created_at": profile.createdAt,
      });
    // Upsert per-file observations. Merge rules on conflict:
    //  - ownership values 'sf_generated' / 'candidate_harness' are sticky and
    //    never downgraded by a fresh observation;
    //  - first_seen_at keeps the original sighting;
    //  - adopted_at / adoption_unit_id keep the existing value once set.
    const stmt = currentDb.prepare(`INSERT INTO repo_file_observations (
path, latest_profile_id, git_status, ownership, language, size_bytes,
content_hash, summary, first_seen_at, last_seen_at, adopted_at,
adoption_unit_id
) VALUES (
:path, :latest_profile_id, :git_status, :ownership, :language, :size_bytes,
:content_hash, :summary, :first_seen_at, :last_seen_at, :adopted_at,
:adoption_unit_id
)
ON CONFLICT(path) DO UPDATE SET
latest_profile_id = excluded.latest_profile_id,
git_status = excluded.git_status,
ownership = CASE
WHEN repo_file_observations.ownership = 'sf_generated'
THEN repo_file_observations.ownership
WHEN repo_file_observations.ownership = 'candidate_harness'
THEN repo_file_observations.ownership
ELSE excluded.ownership
END,
language = excluded.language,
size_bytes = excluded.size_bytes,
content_hash = excluded.content_hash,
summary = excluded.summary,
first_seen_at = repo_file_observations.first_seen_at,
last_seen_at = excluded.last_seen_at,
adopted_at = COALESCE(repo_file_observations.adopted_at, excluded.adopted_at),
adoption_unit_id = COALESCE(repo_file_observations.adoption_unit_id, excluded.adoption_unit_id)`);
    for (const file of profile.git.changedFiles) {
      stmt.run({
        ":path": file.path,
        ":latest_profile_id": profile.profileId,
        ":git_status": file.gitStatus,
        ":ownership": file.ownership,
        ":language": file.language,
        ":size_bytes": file.sizeBytes,
        ":content_hash": file.contentHash,
        ":summary": file.summary,
        ":first_seen_at": file.firstSeenAt,
        ":last_seen_at": file.lastSeenAt,
        ":adopted_at": file.adoptedAt,
        ":adoption_unit_id": file.adoptionUnitId,
      });
    }
  });
}
|
||||
|
||||
/**
 * Fetch the most recently recorded repo profile.
 * @returns {object|null} Flattened profile columns (dirty decoded to a
 *   boolean, nullable git columns normalized via asStringOrNull), or null
 *   when no profile has been recorded.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function getLatestRepoProfile() {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const row = db
    .prepare(`SELECT profile_id, project_hash, project_root, head, branch, remote_hash,
dirty, profile_json, created_at
FROM repo_profiles
ORDER BY created_at DESC, profile_id DESC
LIMIT 1`)
    .get();
  if (!row) return null;
  const latest = {
    profileId: row["profile_id"],
    projectHash: row["project_hash"],
    projectRoot: row["project_root"],
    head: asStringOrNull(row["head"]),
    branch: asStringOrNull(row["branch"]),
    remoteHash: asStringOrNull(row["remote_hash"]),
    dirty: row["dirty"] === 1,
    profileJson: row["profile_json"] ?? "{}",
    createdAt: row["created_at"],
  };
  return latest;
}
|
||||
|
||||
/**
 * List every per-file repo observation, ordered by path.
 * @returns {Array<object>} camelCase observation records; nullable text
 *   columns are normalized via asStringOrNull, size_bytes defaults to 0.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function getRepoFileObservations() {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const toObservation = (row) => ({
    path: row["path"],
    latestProfileId: row["latest_profile_id"],
    gitStatus: row["git_status"],
    ownership: row["ownership"],
    language: asStringOrNull(row["language"]),
    sizeBytes: row["size_bytes"] ?? 0,
    contentHash: asStringOrNull(row["content_hash"]),
    summary: asStringOrNull(row["summary"]),
    firstSeenAt: row["first_seen_at"],
    lastSeenAt: row["last_seen_at"],
    adoptedAt: asStringOrNull(row["adopted_at"]),
    adoptionUnitId: asStringOrNull(row["adoption_unit_id"]),
  });
  return db
    .prepare(`SELECT path, latest_profile_id, git_status, ownership, language,
size_bytes, content_hash, summary, first_seen_at, last_seen_at,
adopted_at, adoption_unit_id
FROM repo_file_observations
ORDER BY path ASC`)
    .all()
    .map(toObservation);
}
|
||||
|
||||
/**
 * Append a schedule entry (reminder/task) to the append-only schedule log.
 * NOTE(review): silently no-ops when no database is open, unlike most writers
 * here which throw SF_STALE_STATE — confirm this fire-and-forget contract is
 * intentional.
 * @param {string} scope - Schedule scope, normalized before use.
 * @param {object} entry - Entry fields (snake_case timestamps as stored);
 *   kind defaults to "reminder", status to "pending", created_by to "user",
 *   schemaVersion to 1.
 * @param {string|null} [importedFrom=null] - Provenance marker for imports.
 */
export function insertScheduleEntry(scope, entry, importedFrom = null) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  const normalizedScope = normalizeScheduleScope(scope);
  const schemaVersion = entry.schemaVersion ?? 1;
  // full_json stores the complete entry with schemaVersion pinned first.
  const full = { schemaVersion, ...entry };
  currentDb
    .prepare(
      `INSERT INTO schedule_entries (
scope, id, schema_version, kind, status, due_at, created_at,
snoozed_at, payload_json, created_by, autonomous_dispatch, full_json,
imported_from
) VALUES (
:scope, :id, :schema_version, :kind, :status, :due_at, :created_at,
:snoozed_at, :payload_json, :created_by, :autonomous_dispatch, :full_json,
:imported_from
)`,
    )
    .run({
      ":scope": normalizedScope,
      ":id": entry.id,
      ":schema_version": schemaVersion,
      ":kind": entry.kind ?? "reminder",
      ":status": entry.status ?? "pending",
      ":due_at": entry.due_at ?? "",
      ":created_at": entry.created_at ?? "",
      ":snoozed_at": entry.snoozed_at ?? null,
      ":payload_json": JSON.stringify(entry.payload ?? {}),
      ":created_by": entry.created_by ?? "user",
      // Boolean stored as 0/1.
      ":autonomous_dispatch": entry.autonomous_dispatch ? 1 : 0,
      ":full_json": JSON.stringify(full),
      ":imported_from": importedFrom,
    });
}
|
||||
|
||||
/**
 * Read the current schedule for a scope from the append-only entry log.
 * schedule_entries may contain multiple versions of the same entry id; the
 * self-join keeps only the row with the highest seq per id (latest version),
 * then orders by due date, creation time and seq. Best-effort: returns []
 * when no database is open or the query fails.
 * @param {string} scope - Schedule scope, normalized before use.
 * @returns {Array<object>} Parsed schedule entries (rows that fail to parse
 *   are dropped by the .filter(Boolean)).
 */
export function getScheduleEntries(scope) {
  const currentDb = _getAdapter();
  if (!currentDb) return [];
  const normalizedScope = normalizeScheduleScope(scope);
  try {
    const rows = currentDb
      .prepare(
        `SELECT s.*
FROM schedule_entries s
JOIN (
SELECT id, MAX(seq) AS max_seq
FROM schedule_entries
WHERE scope = :scope
GROUP BY id
) latest ON latest.id = s.id AND latest.max_seq = s.seq
WHERE s.scope = :scope
ORDER BY s.due_at ASC, s.created_at ASC, s.seq ASC`,
      )
      .all({ ":scope": normalizedScope });
    return rows.map(scheduleEntryFromRow).filter(Boolean);
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Count all schedule-entry rows for a scope (including superseded versions,
 * since the append-only log is not deduplicated here).
 * @param {string} scope - Schedule scope, normalized before use.
 * @returns {number} Row count; 0 when no database is open or the query fails.
 */
export function countScheduleEntries(scope) {
  const db = _getAdapter();
  if (!db) return 0;
  const normalizedScope = normalizeScheduleScope(scope);
  try {
    const result = db
      .prepare(
        "SELECT COUNT(*) AS cnt FROM schedule_entries WHERE scope = :scope",
      )
      .get({ ":scope": normalizedScope });
    return result?.cnt ?? 0;
  } catch {
    return 0;
  }
}
|
||||
|
||||
/**
 * Read a named runtime counter.
 * @param {string} key - Counter key.
 * @returns {number} The stored value, or 0 when no database is open, the key
 *   is missing, or the stored value is not a number.
 */
export function getRuntimeCounter(key) {
  const db = _getAdapter();
  if (!db) return 0;
  const row = db
    .prepare("SELECT value FROM runtime_counters WHERE key = ?")
    .get(key);
  if (typeof row?.value !== "number") return 0;
  return row.value;
}
|
||||
|
||||
/**
 * Set a named runtime counter to an explicit value (upsert).
 * @param {string} key - Counter key.
 * @param {number} value - New value.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setRuntimeCounter(key, value) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":key": key,
    ":value": value,
    ":updated_at": new Date().toISOString(),
  };
  db
    .prepare(
      `INSERT INTO runtime_counters (key, value, updated_at)
VALUES (:key, :value, :updated_at)
ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = excluded.updated_at`,
    )
    .run(params);
}
|
||||
|
||||
/**
 * Increment a runtime counter by one, creating it at 1 if absent.
 * @param {string} key - Counter key.
 * @returns {number} The counter value after the increment (re-read from the
 *   table; falls back to 1 if the read yields a non-numeric value).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function incrementRuntimeCounter(key) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stamp = new Date().toISOString();
  const upsert = `INSERT INTO runtime_counters (key, value, updated_at)
     VALUES (:key, 1, :updated_at)
     ON CONFLICT(key) DO UPDATE SET value = value + 1, updated_at = excluded.updated_at`;
  db.prepare(upsert).run({ ":key": key, ":updated_at": stamp });
  const after = db
    .prepare("SELECT value FROM runtime_counters WHERE key = ?")
    .get(key);
  if (typeof after?.value === "number") return after.value;
  return 1;
}
|
||||
|
||||
/**
 * Read the project start timestamp from project_metrics_meta.
 * @param {object} db - Database adapter (passed in; does not use _getAdapter).
 * @returns {number|null} Numeric timestamp, or null when the row is missing
 *   or its value is not a finite number.
 */
export function getProjectStartedAt(db) {
  const found = db
    .prepare(
      "SELECT value FROM project_metrics_meta WHERE key = 'projectStartedAt'",
    )
    .get();
  if (!found) return null;
  const parsed = Number(found["value"]);
  if (!Number.isFinite(parsed)) return null;
  return parsed;
}
|
||||
|
||||
/**
 * Persist the project start timestamp (upsert, stored as a string).
 * @param {object} db - Database adapter (passed in; does not use _getAdapter).
 * @param {number} ts - Timestamp to store.
 */
export function setProjectStartedAt(db, ts) {
  const sql = `INSERT INTO project_metrics_meta (key, value) VALUES ('projectStartedAt', :value)
     ON CONFLICT(key) DO UPDATE SET value = excluded.value`;
  db.prepare(sql).run({ ":value": String(ts) });
}
|
||||
|
||||
/**
 * Open an intent chapter; a chapter with the same id is left untouched
 * (ON CONFLICT(id) DO NOTHING).
 * @param {object} args - Chapter fields; milestoneId/sliceId/taskId and
 *   metadata are optional and stored as NULL when absent.
 * @returns {string} The chapter id passed in.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function openIntentChapter({
  id,
  unitType,
  unitId,
  milestoneId,
  sliceId,
  taskId,
  intent,
  metadata,
}) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":id": id,
    ":unitType": unitType,
    ":unitId": unitId,
    ":milestoneId": milestoneId ?? null,
    ":sliceId": sliceId ?? null,
    ":taskId": taskId ?? null,
    ":intent": intent,
    ":openedAt": new Date().toISOString(),
    ":metadataJson": metadata ? JSON.stringify(metadata) : null,
  };
  db.prepare(
    `INSERT INTO intent_chapters
       (id, unit_type, unit_id, milestone_id, slice_id, task_id, intent, opened_at, metadata_json)
     VALUES
       (:id, :unitType, :unitId, :milestoneId, :sliceId, :taskId, :intent, :openedAt, :metadataJson)
     ON CONFLICT(id) DO NOTHING`,
  ).run(params);
  return id;
}
|
||||
|
||||
/**
 * Close an open intent chapter, recording the outcome.
 * @param {string} id - Chapter id.
 * @param {string} [outcome="done"] - Outcome label to store.
 * @returns {boolean} True when an open chapter was closed; false when the
 *   chapter does not exist or was already closed.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function closeIntentChapter(id, outcome = "done") {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const update = db.prepare(
    `UPDATE intent_chapters
        SET closed_at = :closedAt, outcome = :outcome
      WHERE id = :id AND closed_at IS NULL`,
  );
  const outcomeOfRun = update.run({
    ":id": id,
    ":closedAt": new Date().toISOString(),
    ":outcome": outcome,
  });
  return (outcomeOfRun?.changes ?? 0) > 0;
}
|
||||
|
||||
/**
 * List the most recently opened intent chapters that are still open.
 * @param {{limit?: number}} [opts] - Maximum rows to return (default 5).
 * @returns {Array<object>} Rows with camelCase aliases; empty array when the
 *   database is closed.
 */
export function getOpenIntentChapters({ limit = 5 } = {}) {
  const db = _getAdapter();
  if (!db) return [];
  const query = `SELECT id, unit_type as unitType, unit_id as unitId,
            milestone_id as milestoneId, slice_id as sliceId, task_id as taskId,
            intent, opened_at as openedAt, metadata_json as metadataJson
       FROM intent_chapters
      WHERE closed_at IS NULL
      ORDER BY opened_at DESC
      LIMIT :limit`;
  return db.prepare(query).all({ ":limit": limit });
}
|
||||
|
||||
/**
 * Close every open intent chapter belonging to a unit.
 * @param {string} unitType - Unit type discriminator.
 * @param {string} unitId - Unit identifier.
 * @param {string} [outcome="cancelled"] - Outcome label to record.
 * @returns {number} Number of chapters closed (0 when the database is closed).
 */
export function closeIntentChaptersForUnit(
  unitType,
  unitId,
  outcome = "cancelled",
) {
  const db = _getAdapter();
  if (!db) return 0;
  const params = {
    ":closedAt": new Date().toISOString(),
    ":outcome": outcome,
    ":unitType": unitType,
    ":unitId": unitId,
  };
  const result = db
    .prepare(
      `UPDATE intent_chapters
          SET closed_at = :closedAt, outcome = :outcome
        WHERE unit_type = :unitType AND unit_id = :unitId AND closed_at IS NULL`,
    )
    .run(params);
  return result?.changes ?? 0;
}
|
||||
98
src/resources/extensions/sf/sf-db/sf-db-self-feedback.js
Normal file
98
src/resources/extensions/sf/sf-db/sf-db-self-feedback.js
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
import { _getAdapter, rowToSelfFeedback } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Insert a self-feedback entry into the self_feedback table.
 *
 * Idempotent on id: a row with the same id is left untouched
 * (ON CONFLICT(id) DO NOTHING). The whole entry object is additionally
 * serialized into the full_json column, so column values and full_json
 * come from the same source object.
 *
 * @param {object} entry - Self-feedback record. Optional fields default to
 *   "" (text columns) or NULL (resolution columns); `blocking` is stored as
 *   0/1. `entry.occurredIn` (optional) supplies unit/milestone/slice/task.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertSelfFeedbackEntry(entry) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Where the feedback occurred; tolerate a missing occurredIn object.
  const occurred = entry.occurredIn ?? {};
  currentDb
    .prepare(`INSERT INTO self_feedback (
      id, ts, kind, severity, blocking, repo_identity, sf_version, base_path,
      unit_type, milestone_id, slice_id, task_id, summary, evidence, suggested_fix, full_json,
      resolved_at, resolved_reason, resolved_by_sf_version, resolved_evidence_json, resolved_criteria_json
    ) VALUES (
      :id, :ts, :kind, :severity, :blocking, :repo_identity, :sf_version, :base_path,
      :unit_type, :milestone_id, :slice_id, :task_id, :summary, :evidence, :suggested_fix, :full_json,
      :resolved_at, :resolved_reason, :resolved_by_sf_version, :resolved_evidence_json, :resolved_criteria_json
    )
    ON CONFLICT(id) DO NOTHING`)
    .run({
      ":id": entry.id,
      ":ts": entry.ts,
      ":kind": entry.kind,
      ":severity": entry.severity,
      // SQLite has no boolean type; store as 0/1.
      ":blocking": entry.blocking ? 1 : 0,
      ":repo_identity": entry.repoIdentity ?? "",
      ":sf_version": entry.sfVersion ?? "",
      ":base_path": entry.basePath ?? "",
      ":unit_type": occurred.unitType ?? null,
      ":milestone_id": occurred.milestone ?? null,
      ":slice_id": occurred.slice ?? null,
      ":task_id": occurred.task ?? null,
      ":summary": entry.summary ?? "",
      ":evidence": entry.evidence ?? "",
      ":suggested_fix": entry.suggestedFix ?? "",
      // Full entry snapshot, kept alongside the flattened columns.
      ":full_json": JSON.stringify(entry),
      ":resolved_at": entry.resolvedAt ?? null,
      ":resolved_reason": entry.resolvedReason ?? null,
      ":resolved_by_sf_version": entry.resolvedBySfVersion ?? null,
      ":resolved_evidence_json": entry.resolvedEvidence
        ? JSON.stringify(entry.resolvedEvidence)
        : null,
      ":resolved_criteria_json": entry.resolvedCriteriaMet
        ? JSON.stringify(entry.resolvedCriteriaMet)
        : null,
    });
}
|
||||
|
||||
/**
 * List all self-feedback entries, oldest first (ties broken by id).
 * @returns {Array<object>} Decoded entries; empty array when the database
 *   is closed.
 */
export function listSelfFeedbackEntries() {
  const db = _getAdapter();
  if (!db) return [];
  const raw = db
    .prepare("SELECT * FROM self_feedback ORDER BY ts ASC, id ASC")
    .all();
  return raw.map(rowToSelfFeedback);
}
|
||||
|
||||
/**
 * Mark an existing, unresolved self-feedback entry as resolved.
 *
 * Rebuilds the full_json snapshot from the stored row plus the resolution
 * fields, so the JSON column and the resolved_* columns stay in sync. An
 * entry that does not exist or is already resolved is left untouched.
 *
 * @param {string} entryId - Id of the entry to resolve.
 * @param {object} resolution - Resolution details: reason, optional
 *   resolvedAt (defaults to now), resolvedBySfVersion, evidence, criteriaMet.
 * @returns {boolean} True when exactly this call resolved the entry.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function resolveSelfFeedbackEntry(entryId, resolution) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const existing = currentDb
    .prepare("SELECT * FROM self_feedback WHERE id = :id")
    .get({ ":id": entryId });
  // Missing or already-resolved entries are not touched.
  if (!existing || existing["resolved_at"]) return false;
  const resolvedAt = resolution.resolvedAt ?? new Date().toISOString();
  // Rebuild the entry object so full_json reflects the resolution.
  const entry = {
    ...rowToSelfFeedback(existing),
    resolvedAt,
    resolvedReason: resolution.reason,
    resolvedBySfVersion: resolution.resolvedBySfVersion ?? "",
    resolvedEvidence: resolution.evidence,
  };
  // criteriaMet is optional; only include the key when provided.
  if (resolution.criteriaMet)
    entry.resolvedCriteriaMet = resolution.criteriaMet;
  // The WHERE guard re-checks resolved_at IS NULL so a concurrent resolve
  // cannot double-apply.
  const result = currentDb
    .prepare(`UPDATE self_feedback SET
      full_json = :full_json,
      resolved_at = :resolved_at,
      resolved_reason = :resolved_reason,
      resolved_by_sf_version = :resolved_by_sf_version,
      resolved_evidence_json = :resolved_evidence_json,
      resolved_criteria_json = :resolved_criteria_json
    WHERE id = :id AND resolved_at IS NULL`)
    .run({
      ":id": entryId,
      ":full_json": JSON.stringify(entry),
      ":resolved_at": resolvedAt,
      ":resolved_reason": resolution.reason ?? "",
      ":resolved_by_sf_version": resolution.resolvedBySfVersion ?? "",
      ":resolved_evidence_json": resolution.evidence
        ? JSON.stringify(resolution.evidence)
        : null,
      ":resolved_criteria_json": resolution.criteriaMet
        ? JSON.stringify(resolution.criteriaMet)
        : null,
    });
  return result.changes > 0;
}
|
||||
191
src/resources/extensions/sf/sf-db/sf-db-session-store.js
Normal file
191
src/resources/extensions/sf/sf-db/sf-db-session-store.js
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
import { _getAdapter } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { logWarning, logError } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Insert or update a session row. On update, nullable fields keep their
 * old values when the new value is NULL (COALESCE), and summary_count is
 * bumped only when a new summary is supplied.
 * @param {object} entry - Session fields; sessionId is required, the rest
 *   have defaults (mode "interactive", cwd "", others NULL).
 */
export function upsertSession(entry) {
  const db = _getAdapter();
  if (!db) return;
  const params = {
    ":session_id": entry.sessionId,
    ":trace_id": entry.traceId ?? null,
    ":mode": entry.mode ?? "interactive",
    ":cwd": entry.cwd ?? "",
    ":repo": entry.repo ?? null,
    ":branch": entry.branch ?? null,
    ":summary": entry.summary ?? null,
    ":now": new Date().toISOString(),
  };
  db.prepare(`INSERT INTO sessions
      (session_id, trace_id, mode, cwd, repo, branch, summary, summary_count, created_at, updated_at)
      VALUES (:session_id, :trace_id, :mode, :cwd, :repo, :branch, :summary, 0, :now, :now)
      ON CONFLICT(session_id) DO UPDATE SET
        trace_id = COALESCE(excluded.trace_id, sessions.trace_id),
        repo = COALESCE(excluded.repo, sessions.repo),
        branch = COALESCE(excluded.branch, sessions.branch),
        summary = COALESCE(excluded.summary, sessions.summary),
        summary_count = CASE WHEN excluded.summary IS NOT NULL
                             THEN sessions.summary_count + 1
                             ELSE sessions.summary_count END,
        updated_at = excluded.updated_at`).run(params);
}
|
||||
|
||||
/**
 * Archive a session by stamping archived_at (and updated_at) with now.
 * No-op when the database is closed.
 * @param {string} sessionId - Session to archive.
 */
export function archiveSession(sessionId) {
  const db = _getAdapter();
  if (!db) return;
  const stamp = new Date().toISOString();
  db.prepare(
    `UPDATE sessions SET archived_at = :now, updated_at = :now WHERE session_id = :session_id`,
  ).run({ ":session_id": sessionId, ":now": stamp });
}
|
||||
|
||||
/**
 * Un-archive a session: clears archived_at and refreshes updated_at.
 * No-op when the database is closed.
 * @param {string} sessionId - Session to restore.
 */
export function unarchiveSession(sessionId) {
  const db = _getAdapter();
  if (!db) return;
  const stamp = new Date().toISOString();
  db.prepare(
    `UPDATE sessions SET archived_at = NULL, updated_at = :now WHERE session_id = :session_id`,
  ).run({ ":session_id": sessionId, ":now": stamp });
}
|
||||
|
||||
/**
 * Record a conversation turn. On a (session_id, turn_index) conflict the
 * existing message/response are kept unless the new values are non-NULL.
 * @param {object} entry - Turn fields; ts defaults to now.
 * @returns {number|bigint|null} lastInsertRowid of the write, or null when
 *   the database is closed or the driver reports no rowid.
 */
export function insertSessionTurn(entry) {
  const db = _getAdapter();
  if (!db) return null;
  const params = {
    ":session_id": entry.sessionId,
    ":turn_index": entry.turnIndex,
    ":user_message": entry.userMessage ?? null,
    ":assistant_response": entry.assistantResponse ?? null,
    ":ts": entry.ts ?? new Date().toISOString(),
  };
  const outcome = db
    .prepare(`INSERT INTO turns
      (session_id, turn_index, user_message, assistant_response, ts)
      VALUES (:session_id, :turn_index, :user_message, :assistant_response, :ts)
      ON CONFLICT(session_id, turn_index) DO UPDATE SET
        user_message = COALESCE(excluded.user_message, turns.user_message),
        assistant_response = COALESCE(excluded.assistant_response, turns.assistant_response)`)
    .run(params);
  return outcome.lastInsertRowid ?? null;
}
|
||||
|
||||
/**
 * Fill in a turn's assistant response — but only if it is still NULL, so
 * an already-recorded response is never overwritten.
 * @param {string} sessionId - Session the turn belongs to.
 * @param {number} turnIndex - Turn index within the session.
 * @param {string} assistantResponse - Response text to store.
 */
export function patchTurnResponse(sessionId, turnIndex, assistantResponse) {
  const db = _getAdapter();
  if (!db) return;
  const stmt = db.prepare(`UPDATE turns SET assistant_response = :resp
      WHERE session_id = :sid AND turn_index = :idx AND assistant_response IS NULL`);
  stmt.run({
    ":resp": assistantResponse,
    ":sid": sessionId,
    ":idx": turnIndex,
  });
}
|
||||
|
||||
/**
 * Record that a file was touched during a session. Duplicate touches are
 * ignored (INSERT OR IGNORE). No-op when the database is closed.
 * @param {object} entry - Touch fields; toolName/turnId optional,
 *   firstSeenAt defaults to now.
 */
export function recordSessionFileTouch(entry) {
  const db = _getAdapter();
  if (!db) return;
  const params = {
    ":session_id": entry.sessionId,
    ":path": entry.path,
    ":tool_name": entry.toolName ?? null,
    ":turn_id": entry.turnId ?? null,
    ":first_seen_at": entry.firstSeenAt ?? new Date().toISOString(),
  };
  db.prepare(`INSERT OR IGNORE INTO session_file_touches
      (session_id, path, tool_name, turn_id, first_seen_at)
      VALUES (:session_id, :path, :tool_name, :turn_id, :first_seen_at)`).run(params);
}
|
||||
|
||||
/**
 * Record a reference observed during a session. Duplicates are ignored
 * (INSERT OR IGNORE). No-op when the database is closed.
 * @param {object} entry - Ref fields; turnId optional, createdAt defaults
 *   to now.
 */
export function recordSessionRef(entry) {
  const db = _getAdapter();
  if (!db) return;
  const params = {
    ":session_id": entry.sessionId,
    ":ref_type": entry.refType,
    ":ref_value": entry.refValue,
    ":turn_id": entry.turnId ?? null,
    ":created_at": entry.createdAt ?? new Date().toISOString(),
  };
  db.prepare(`INSERT OR IGNORE INTO session_refs
      (session_id, ref_type, ref_value, turn_id, created_at)
      VALUES (:session_id, :ref_type, :ref_value, :turn_id, :created_at)`).run(params);
}
|
||||
|
||||
/**
 * Full-text search over recorded turns (turns_fts), joined with session
 * metadata, ranked by FTS relevance.
 * @param {string} query - FTS MATCH expression.
 * @param {number} [limit=20] - Max rows; clamped to [1, 100].
 * @returns {Array<object>} Matching turn rows; empty when db is closed.
 */
export function searchSessionTurns(query, limit = 20) {
  const db = _getAdapter();
  if (!db) return [];
  const cappedLimit = Math.max(1, Math.min(100, limit));
  const sql = `SELECT t.id, t.session_id, t.turn_index, t.ts,
            t.user_message, t.assistant_response,
            s.mode, s.cwd, s.repo, s.branch
       FROM turns_fts
       JOIN turns t ON turns_fts.rowid = t.id
       JOIN sessions s ON t.session_id = s.session_id
      WHERE turns_fts MATCH :query
      ORDER BY rank
      LIMIT :limit`;
  return db.prepare(sql).all({ ":query": query, ":limit": cappedLimit });
}
|
||||
|
||||
/**
 * List sessions ordered by most recently updated, with per-session counts
 * of turns and touched files.
 * @param {number} [limit=20] - Max rows; clamped to [1, 100].
 * @returns {Array<object>} Session summaries; empty when db is closed.
 */
export function listRecentSessions(limit = 20) {
  const db = _getAdapter();
  if (!db) return [];
  const cappedLimit = Math.max(1, Math.min(100, limit));
  const sql = `SELECT s.session_id, s.mode, s.cwd, s.repo, s.branch,
            s.summary, s.created_at, s.updated_at,
            COUNT(DISTINCT t.id) AS turn_count,
            COUNT(DISTINCT f.id) AS file_count
       FROM sessions s
       LEFT JOIN turns t ON t.session_id = s.session_id
       LEFT JOIN session_file_touches f ON f.session_id = s.session_id
      GROUP BY s.session_id
      ORDER BY s.updated_at DESC
      LIMIT :limit`;
  return db.prepare(sql).all({ ":limit": cappedLimit });
}
|
||||
|
||||
/**
 * Append a snapshot row for a session. The snapshot index is the next
 * integer after the session's current maximum (starting at 0).
 * @param {object} args - sessionId required; gitStashRef/label optional,
 *   ts defaults to now.
 * @returns {number} The id of the inserted row (0 if it cannot be re-read).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertSessionSnapshot(args) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Compute the next per-session snapshot index.
  const indexRow = db
    .prepare(
      "SELECT COALESCE(MAX(snapshot_index), -1) + 1 AS nxt FROM session_snapshots WHERE session_id = :sid",
    )
    .get({ ":sid": args.sessionId });
  const nextIndex = indexRow ? Number(indexRow["nxt"]) : 0;
  db.prepare(`INSERT INTO session_snapshots
      (session_id, snapshot_index, git_stash_ref, label, ts)
      VALUES (:sid, :idx, :ref, :label, :ts)`).run({
    ":sid": args.sessionId,
    ":idx": nextIndex,
    ":ref": args.gitStashRef ?? null,
    ":label": args.label ?? null,
    ":ts": args.ts ?? new Date().toISOString(),
  });
  // Re-read the row to report its id.
  const inserted = db
    .prepare(
      "SELECT id FROM session_snapshots WHERE session_id = :sid AND snapshot_index = :idx",
    )
    .get({ ":sid": args.sessionId, ":idx": nextIndex });
  if (!inserted) return 0;
  return Number(inserted["id"]);
}
|
||||
|
||||
/**
 * List all snapshots for a session in snapshot-index order.
 * @param {string} sessionId - Session to list.
 * @returns {Array<object>} Snapshot rows; empty when db is closed.
 */
export function listSessionSnapshots(sessionId) {
  const db = _getAdapter();
  if (!db) return [];
  const sql =
    "SELECT * FROM session_snapshots WHERE session_id = :sid ORDER BY snapshot_index ASC";
  return db.prepare(sql).all({ ":sid": sessionId });
}
|
||||
|
||||
464
src/resources/extensions/sf/sf-db/sf-db-slices.js
Normal file
464
src/resources/extensions/sf/sf-db/sf-db-slices.js
Normal file
|
|
@ -0,0 +1,464 @@
|
|||
import { _getAdapter, insertSliceSpecIfAbsent, parsePlanningMeeting, rowToSlice, safeParseJsonArray, rowToTask, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { existsSync, readdirSync, readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
/**
 * Insert or merge a slice row.
 *
 * Upsert on (milestone_id, id). Every bound value has two forms: the main
 * parameter (with a default applied, used for INSERT) and a ":raw_*"
 * sentinel (NULL when the caller omitted the field). The ON CONFLICT
 * branch only overwrites a column when its sentinel is non-NULL, so a
 * partial update never clobbers existing data with defaults. A slice whose
 * status is already 'complete'/'done' keeps that status. Finishes by
 * seeding the slice spec if absent.
 *
 * @param {object} s - Slice fields; s.milestoneId and s.id are required.
 *   Optional planning sub-object supplies goal/criteria/adversarial fields.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertSlice(s) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT INTO slices (
      milestone_id, id, title, status, risk, depends, demo, created_at,
      goal, success_criteria, proof_level, integration_closure, observability_impact,
      adversarial_partner, adversarial_combatant, adversarial_architect, planning_meeting_json, sequence,
      is_sketch, sketch_scope
    ) VALUES (
      :milestone_id, :id, :title, :status, :risk, :depends, :demo, :created_at,
      :goal, :success_criteria, :proof_level, :integration_closure, :observability_impact,
      :adversarial_partner, :adversarial_combatant, :adversarial_architect, :planning_meeting_json, :sequence,
      :is_sketch, :sketch_scope
    )
    ON CONFLICT (milestone_id, id) DO UPDATE SET
      title = CASE WHEN :raw_title IS NOT NULL THEN excluded.title ELSE slices.title END,
      status = CASE WHEN slices.status IN ('complete', 'done') THEN slices.status ELSE excluded.status END,
      risk = CASE WHEN :raw_risk IS NOT NULL THEN excluded.risk ELSE slices.risk END,
      depends = excluded.depends,
      demo = CASE WHEN :raw_demo IS NOT NULL THEN excluded.demo ELSE slices.demo END,
      goal = CASE WHEN :raw_goal IS NOT NULL THEN excluded.goal ELSE slices.goal END,
      success_criteria = CASE WHEN :raw_success_criteria IS NOT NULL THEN excluded.success_criteria ELSE slices.success_criteria END,
      proof_level = CASE WHEN :raw_proof_level IS NOT NULL THEN excluded.proof_level ELSE slices.proof_level END,
      integration_closure = CASE WHEN :raw_integration_closure IS NOT NULL THEN excluded.integration_closure ELSE slices.integration_closure END,
      observability_impact = CASE WHEN :raw_observability_impact IS NOT NULL THEN excluded.observability_impact ELSE slices.observability_impact END,
      adversarial_partner = CASE WHEN :raw_adversarial_partner IS NOT NULL THEN excluded.adversarial_partner ELSE slices.adversarial_partner END,
      adversarial_combatant = CASE WHEN :raw_adversarial_combatant IS NOT NULL THEN excluded.adversarial_combatant ELSE slices.adversarial_combatant END,
      adversarial_architect = CASE WHEN :raw_adversarial_architect IS NOT NULL THEN excluded.adversarial_architect ELSE slices.adversarial_architect END,
      planning_meeting_json = CASE WHEN :raw_planning_meeting_json IS NOT NULL THEN excluded.planning_meeting_json ELSE slices.planning_meeting_json END,
      sequence = CASE WHEN :raw_sequence IS NOT NULL THEN excluded.sequence ELSE slices.sequence END,
      is_sketch = CASE WHEN :raw_is_sketch IS NOT NULL THEN excluded.is_sketch ELSE slices.is_sketch END,
      sketch_scope = CASE WHEN :raw_sketch_scope IS NOT NULL THEN excluded.sketch_scope ELSE slices.sketch_scope END`)
    .run({
      ":milestone_id": s.milestoneId,
      ":id": s.id,
      ":title": s.title ?? "",
      ":status": s.status ?? "pending",
      ":risk": s.risk ?? "medium",
      // depends is always written (no sentinel guard) as a JSON array.
      ":depends": JSON.stringify(s.depends ?? []),
      ":demo": s.demo ?? "",
      ":created_at": new Date().toISOString(),
      ":goal": s.planning?.goal ?? "",
      ":success_criteria": s.planning?.successCriteria ?? "",
      ":proof_level": s.planning?.proofLevel ?? "",
      ":integration_closure": s.planning?.integrationClosure ?? "",
      ":observability_impact": s.planning?.observabilityImpact ?? "",
      ":adversarial_partner": s.planning?.adversarialReview?.partner ?? "",
      ":adversarial_combatant": s.planning?.adversarialReview?.combatant ?? "",
      ":adversarial_architect": s.planning?.adversarialReview?.architect ?? "",
      ":planning_meeting_json": s.planning?.planningMeeting
        ? JSON.stringify(s.planning.planningMeeting)
        : "",
      ":sequence": s.sequence ?? 0,
      ":is_sketch": s.isSketch === true ? 1 : 0,
      ":sketch_scope": s.sketchScope ?? "",
      // Raw sentinel params: NULL when caller omitted the field, used in ON CONFLICT guards
      ":raw_title": s.title ?? null,
      ":raw_risk": s.risk ?? null,
      ":raw_demo": s.demo ?? null,
      ":raw_goal": s.planning?.goal ?? null,
      ":raw_success_criteria": s.planning?.successCriteria ?? null,
      ":raw_proof_level": s.planning?.proofLevel ?? null,
      ":raw_integration_closure": s.planning?.integrationClosure ?? null,
      ":raw_observability_impact": s.planning?.observabilityImpact ?? null,
      ":raw_adversarial_partner":
        s.planning?.adversarialReview?.partner ?? null,
      ":raw_adversarial_combatant":
        s.planning?.adversarialReview?.combatant ?? null,
      ":raw_adversarial_architect":
        s.planning?.adversarialReview?.architect ?? null,
      ":raw_planning_meeting_json": s.planning?.planningMeeting
        ? JSON.stringify(s.planning.planningMeeting)
        : null,
      ":raw_sequence": s.sequence ?? null,
      ":raw_is_sketch": s.isSketch === undefined ? null : s.isSketch ? 1 : 0,
      ":raw_sketch_scope": s.sketchScope === undefined ? null : s.sketchScope,
    });
  // Seed the companion spec record if one does not exist yet.
  insertSliceSpecIfAbsent(s.milestoneId, s.id, s.planning ?? {});
}
|
||||
|
||||
/**
 * Create a minimal slice row in 'pending' status; an existing row with the
 * same (milestone_id, id) is left untouched.
 * @param {object} args - milestoneId, sliceId, title, createdAt.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertOrIgnoreSlice(args) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":mid": args.milestoneId,
    ":sid": args.sliceId,
    ":title": args.title,
    ":ts": args.createdAt,
  };
  db.prepare(`INSERT OR IGNORE INTO slices (milestone_id, id, title, status, created_at)
      VALUES (:mid, :sid, :title, 'pending', :ts)`).run(params);
}
|
||||
|
||||
/**
 * Clear a slice's sketch flag. Thin wrapper over setSliceSketchFlag(false).
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice to un-flag.
 */
export function clearSliceSketch(milestoneId, sliceId) {
  setSliceSketchFlag(milestoneId, sliceId, false);
}
|
||||
|
||||
/**
 * Set or clear a slice's is_sketch flag (stored as 0/1).
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice to update.
 * @param {boolean} isSketch - New flag value.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setSliceSketchFlag(milestoneId, sliceId, isSketch) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const flag = isSketch ? 1 : 0;
  db.prepare(
    `UPDATE slices SET is_sketch = :is_sketch WHERE milestone_id = :mid AND id = :sid`,
  ).run({ ":is_sketch": flag, ":mid": milestoneId, ":sid": sliceId });
}
|
||||
|
||||
/**
 * Clear the sketch flag on any slice that now has a plan file on disk.
 * @param {string} milestoneId - Milestone whose slices are checked.
 * @param {(sliceId: string) => boolean} hasPlanFile - Predicate supplied by
 *   the caller; true means a plan file exists for the slice.
 */
export function autoHealSketchFlags(milestoneId, hasPlanFile) {
  const db = _getAdapter();
  if (!db) return;
  const sketchRows = db
    .prepare(
      `SELECT id FROM slices WHERE milestone_id = :mid AND is_sketch = 1`,
    )
    .all({ ":mid": milestoneId });
  for (const sketch of sketchRows) {
    if (!hasPlanFile(sketch.id)) continue;
    setSliceSketchFlag(milestoneId, sketch.id, false);
  }
}
|
||||
|
||||
/**
 * Merge planning fields onto an existing slice row. Each column keeps its
 * current value when the corresponding planning field is absent (COALESCE
 * with a NULL bind). Also seeds the slice spec record if missing.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice to update.
 * @param {object} planning - Partial planning data.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertSlicePlanning(milestoneId, sliceId, planning) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  insertSliceSpecIfAbsent(milestoneId, sliceId, planning);
  const review = planning.adversarialReview;
  const meetingJson = planning.planningMeeting
    ? JSON.stringify(planning.planningMeeting)
    : null;
  db.prepare(`UPDATE slices SET
      goal = COALESCE(:goal, goal),
      success_criteria = COALESCE(:success_criteria, success_criteria),
      proof_level = COALESCE(:proof_level, proof_level),
      integration_closure = COALESCE(:integration_closure, integration_closure),
      observability_impact = COALESCE(:observability_impact, observability_impact),
      adversarial_partner = COALESCE(:adversarial_partner, adversarial_partner),
      adversarial_combatant = COALESCE(:adversarial_combatant, adversarial_combatant),
      adversarial_architect = COALESCE(:adversarial_architect, adversarial_architect),
      planning_meeting_json = COALESCE(:planning_meeting_json, planning_meeting_json)
    WHERE milestone_id = :milestone_id AND id = :id`).run({
    ":milestone_id": milestoneId,
    ":id": sliceId,
    ":goal": planning.goal ?? null,
    ":success_criteria": planning.successCriteria ?? null,
    ":proof_level": planning.proofLevel ?? null,
    ":integration_closure": planning.integrationClosure ?? null,
    ":observability_impact": planning.observabilityImpact ?? null,
    ":adversarial_partner": review?.partner ?? null,
    ":adversarial_combatant": review?.combatant ?? null,
    ":adversarial_architect": review?.architect ?? null,
    ":planning_meeting_json": meetingJson,
  });
}
|
||||
|
||||
/**
 * Fetch one slice by (milestone, slice) key.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @returns {object|null} Decoded slice, or null when the database is
 *   closed or the row is missing.
 */
export function getSlice(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const found = db
    .prepare("SELECT * FROM slices WHERE milestone_id = :mid AND id = :sid")
    .get({ ":mid": milestoneId, ":sid": sliceId });
  return found ? rowToSlice(found) : null;
}
|
||||
|
||||
/**
 * Set a slice's status and completion timestamp.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @param {string} status - New status value.
 * @param {string} [completedAt] - Completion timestamp; stored as NULL
 *   when omitted.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateSliceStatus(milestoneId, sliceId, status, completedAt) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":status": status,
    ":completed_at": completedAt ?? null,
    ":milestone_id": milestoneId,
    ":id": sliceId,
  };
  db.prepare(`UPDATE slices SET status = :status, completed_at = :completed_at
      WHERE milestone_id = :milestone_id AND id = :id`).run(params);
}
|
||||
|
||||
/**
 * Store a slice's UAT verdict.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @param {string} verdict - Verdict value to store.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setSliceUatVerdict(milestoneId, sliceId, verdict) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  db.prepare(
    `UPDATE slices SET uat_verdict = :verdict WHERE milestone_id = :mid AND id = :sid`,
  ).run({ ":mid": milestoneId, ":sid": sliceId, ":verdict": verdict });
}
|
||||
|
||||
/**
 * Read a slice's UAT verdict.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @returns {string|null} The verdict, or null when the database is closed,
 *   the slice is missing, or no verdict has been set.
 */
export function getSliceUatVerdict(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const found = db
    .prepare(
      `SELECT uat_verdict FROM slices WHERE milestone_id = :mid AND id = :sid`,
    )
    .get({ ":mid": milestoneId, ":sid": sliceId });
  return found?.uat_verdict ?? null;
}
|
||||
|
||||
/**
 * Backfill uat_verdict for slices that lack one, by scanning their
 * on-disk assessment/UAT result files.
 *
 * For each slice with a NULL verdict, looks under
 * .sf/milestones/<mid>/slices/<sid>/ for <sid>-ASSESSMENT.md then
 * <sid>-UAT_RESULT.md, parses a verdict out of the first readable file
 * that yields one, and writes it back. Unreadable files are skipped;
 * slices with no parsable verdict stay NULL.
 *
 * @param {string} basePath - Project root containing the .sf directory.
 */
export function backfillUatVerdicts(basePath) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  // Find all slices that have no verdict yet
  const rows = currentDb
    .prepare(`SELECT milestone_id, id FROM slices WHERE uat_verdict IS NULL`)
    .all();
  if (!rows.length) return;
  // Extract verdict from content — inline to avoid cross-module import at db layer
  // Checks YAML frontmatter first ("verdict: <word>"); only when there is
  // no frontmatter does it fall back to a "**Verdict:**" line in the body.
  // "passed" is normalized to "pass" in both paths.
  function parseVerdictFromContent(content) {
    const fmMatch = content.match(/^---\n([\s\S]*?)\n---/);
    if (fmMatch) {
      const m = fmMatch[1].match(/verdict:\s*([\w-]+)/i);
      if (m) {
        let v = m[1].toLowerCase();
        if (v === "passed") v = "pass";
        return v;
      }
      // Frontmatter present but without a verdict key: do not scan the body.
      return null;
    }
    const bodyMatch = content.match(
      /\*\*Verdict:?\*\*\s*(?:✅\s*)?(\w[\w-]*)/i,
    );
    if (bodyMatch) {
      let v = bodyMatch[1].toLowerCase();
      if (v === "passed") v = "pass";
      return v;
    }
    return null;
  }
  // Single prepared UPDATE reused for every backfilled slice.
  const stmt = currentDb.prepare(
    `UPDATE slices SET uat_verdict = :verdict WHERE milestone_id = :mid AND id = :sid`,
  );
  for (const row of rows) {
    const mid = row["milestone_id"];
    const sid = row["id"];
    const sliceDir = join(basePath, ".sf", "milestones", mid, "slices", sid);
    // ASSESSMENT takes precedence over UAT_RESULT; first verdict wins.
    const candidates = [
      join(sliceDir, `${sid}-ASSESSMENT.md`),
      join(sliceDir, `${sid}-UAT_RESULT.md`),
    ];
    for (const candidatePath of candidates) {
      if (!existsSync(candidatePath)) continue;
      try {
        const content = readFileSync(candidatePath, "utf8");
        const verdict = parseVerdictFromContent(content);
        if (verdict) {
          stmt.run({ ":mid": mid, ":sid": sid, ":verdict": verdict });
          break;
        }
      } catch {
        // Skip unreadable files
      }
    }
  }
}
|
||||
|
||||
/**
 * Store a slice's full summary and UAT markdown bodies.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @param {string} summaryMd - Markdown for full_summary_md.
 * @param {string} uatMd - Markdown for full_uat_md.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setSliceSummaryMd(milestoneId, sliceId, summaryMd, uatMd) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const params = {
    ":mid": milestoneId,
    ":sid": sliceId,
    ":summary_md": summaryMd,
    ":uat_md": uatMd,
  };
  db.prepare(
    `UPDATE slices SET full_summary_md = :summary_md, full_uat_md = :uat_md WHERE milestone_id = :mid AND id = :sid`,
  ).run(params);
}
|
||||
|
||||
/**
 * List all slices of a milestone, ordered by sequence then id.
 * @param {string} milestoneId - Milestone to list.
 * @returns {Array<object>} Decoded slices; empty when db is closed.
 */
export function getMilestoneSlices(milestoneId) {
  const db = _getAdapter();
  if (!db) return [];
  const raw = db
    .prepare(
      "SELECT * FROM slices WHERE milestone_id = :mid ORDER BY sequence, id",
    )
    .all({ ":mid": milestoneId });
  return raw.map(rowToSlice);
}
|
||||
|
||||
/**
 * Summarize slice statuses for a milestone.
 * @param {string} milestoneId - Milestone to summarize.
 * @returns {Array<{id: string, status: string}>} One entry per slice,
 *   ordered by sequence then id; empty when db is closed.
 */
export function getSliceStatusSummary(milestoneId) {
  const db = _getAdapter();
  if (!db) return [];
  const raw = db
    .prepare(
      "SELECT id, status FROM slices WHERE milestone_id = :mid ORDER BY sequence, id",
    )
    .all({ ":mid": milestoneId });
  return raw.map((entry) => ({ id: entry["id"], status: entry["status"] }));
}
|
||||
|
||||
/**
 * Count a slice's tasks, split into done ('complete'/'done') and pending.
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice id.
 * @returns {{total: number, done: number, pending: number}} Zeros when the
 *   database is closed, the query yields nothing, or the slice has no tasks
 *   (SUM over zero rows is NULL and is coalesced to 0).
 */
export function getSliceTaskCounts(milestoneId, sliceId) {
  const empty = { total: 0, done: 0, pending: 0 };
  const db = _getAdapter();
  if (!db) return empty;
  const counts = db
    .prepare(`SELECT
      COUNT(*) as total,
      SUM(CASE WHEN status IN ('complete', 'done') THEN 1 ELSE 0 END) as done,
      SUM(CASE WHEN status NOT IN ('complete', 'done') THEN 1 ELSE 0 END) as pending
    FROM tasks WHERE milestone_id = :mid AND slice_id = :sid`)
    .get({ ":mid": milestoneId, ":sid": sliceId });
  if (!counts) return empty;
  return {
    total: counts["total"] ?? 0,
    done: counts["done"] ?? 0,
    pending: counts["pending"] ?? 0,
  };
}
|
||||
|
||||
/**
 * Replace a slice's dependency rows with the given list.
 *
 * Deletes all existing slice_dependencies rows for (milestoneId, sliceId),
 * then inserts one row per entry in `depends` (INSERT OR IGNORE dedupes).
 * No-op when the database is closed.
 *
 * Fix: the INSERT statement was previously re-prepared inside the loop on
 * every iteration; it is now prepared once and reused.
 *
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - Slice whose dependencies are replaced.
 * @param {string[]} depends - Slice ids this slice depends on.
 */
export function syncSliceDependencies(milestoneId, sliceId, depends) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  currentDb
    .prepare(
      "DELETE FROM slice_dependencies WHERE milestone_id = :mid AND slice_id = :sid",
    )
    .run({ ":mid": milestoneId, ":sid": sliceId });
  // Prepare once, run per dependency — avoids an O(n) re-prepare cost.
  const insertDep = currentDb.prepare(
    "INSERT OR IGNORE INTO slice_dependencies (milestone_id, slice_id, depends_on_slice_id) VALUES (:mid, :sid, :dep)",
  );
  for (const dep of depends) {
    insertDep.run({ ":mid": milestoneId, ":sid": sliceId, ":dep": dep });
  }
}
|
||||
|
||||
/**
 * Find the slices that depend on a given slice (reverse dependency lookup).
 * @param {string} milestoneId - Owning milestone.
 * @param {string} sliceId - The slice others may depend on.
 * @returns {string[]} Ids of dependent slices; empty when db is closed.
 */
export function getDependentSlices(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return [];
  const raw = db
    .prepare(
      "SELECT slice_id FROM slice_dependencies WHERE milestone_id = :mid AND depends_on_slice_id = :sid",
    )
    .all({ ":mid": milestoneId, ":sid": sliceId });
  return raw.map((entry) => entry["slice_id"]);
}
|
||||
|
||||
/**
 * Return the first non-complete slice of a milestone whose dependencies are
 * all satisfied, or null when none qualifies (or no DB is open).
 *
 * Dependency satisfaction is checked two ways:
 *  - Primary: the slice_dependencies junction table (kept in sync by
 *    syncSliceDependencies) via the NOT EXISTS subquery below.
 *  - Fallback: slices with no junction rows are checked against their raw
 *    `depends` JSON column, to handle legacy data or rows written before
 *    syncSliceDependencies ran.
 *
 * @param {string} milestoneId
 * @returns {object|null} slice record via rowToSlice, or null.
 */
export function getActiveSliceFromDb(milestoneId) {
  const currentDb = _getAdapter();
  if (!currentDb) return null;
  // Find the first non-complete slice whose dependencies are all satisfied.
  // Primary: uses the slice_dependencies junction table (kept in sync by syncSliceDependencies).
  // Fallback: for slices with no junction rows, check the `depends` JSON column directly
  // to handle legacy data or rows that were written before syncSliceDependencies ran.
  const candidates = currentDb
    .prepare(`SELECT s.* FROM slices s
      WHERE s.milestone_id = :mid
        AND s.status NOT IN ('complete', 'done', 'skipped')
        AND NOT EXISTS (
          SELECT 1 FROM slice_dependencies d
          WHERE d.milestone_id = :mid
            AND d.slice_id = s.id
            AND d.depends_on_slice_id NOT IN (
              SELECT id FROM slices WHERE milestone_id = :mid AND status IN ('complete', 'done', 'skipped')
            )
        )
      ORDER BY s.sequence, s.id`)
    .all({ ":mid": milestoneId });
  if (candidates.length === 0) return null;
  // Collect completed slice IDs for JSON-dep fallback check.
  const completedIds = new Set(
    currentDb
      .prepare(
        "SELECT id FROM slices WHERE milestone_id = :mid AND status IN ('complete', 'done', 'skipped')",
      )
      .all({ ":mid": milestoneId })
      .map((r) => r["id"]),
  );
  for (const candidate of candidates) {
    // Does this slice have any rows in the junction table at all?
    const hasSyncedDeps =
      (currentDb
        .prepare(
          "SELECT COUNT(*) as c FROM slice_dependencies WHERE milestone_id = :mid AND slice_id = :sid",
        )
        .get({ ":mid": milestoneId, ":sid": candidate["id"] })?.c ?? 0) > 0;
    if (hasSyncedDeps) {
      // Junction table is authoritative and candidate already passed the NOT EXISTS check.
      return rowToSlice(candidate);
    }
    // No junction rows for this slice — fall back to JSON depends column.
    const jsonDeps = safeParseJsonArray(candidate["depends"]);
    if (jsonDeps.length === 0 || jsonDeps.every((d) => completedIds.has(d))) {
      return rowToSlice(candidate);
    }
    // JSON deps not yet satisfied — continue to next candidate.
  }
  return null;
}
|
||||
|
||||
/**
 * Partially update a slice row. Fields that are absent (nullish) in `fields`
 * are passed as NULL, and the COALESCE in the UPDATE leaves the existing
 * column value untouched — so this is a merge, not an overwrite.
 *
 * @param {string} milestoneId
 * @param {string} sliceId
 * @param {{title?:string, risk?:string, depends?:string[], demo?:string}} fields
 *   `depends` is JSON-stringified before storage.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateSliceFields(milestoneId, sliceId, fields) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`UPDATE slices SET
      title = COALESCE(:title, title),
      risk = COALESCE(:risk, risk),
      depends = COALESCE(:depends, depends),
      demo = COALESCE(:demo, demo)
    WHERE milestone_id = :milestone_id AND id = :id`)
    .run({
      ":milestone_id": milestoneId,
      ":id": sliceId,
      ":title": fields.title ?? null,
      ":risk": fields.risk ?? null,
      // NULL (skip) unless a depends array was provided.
      ":depends": fields.depends ? JSON.stringify(fields.depends) : null,
      ":demo": fields.demo ?? null,
    });
}
|
||||
|
||||
/**
 * Stamp the replan_triggered_at timestamp on a slice.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setSliceReplanTriggeredAt(milestoneId, sliceId, ts) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    "UPDATE slices SET replan_triggered_at = :ts WHERE milestone_id = :mid AND id = :sid",
  );
  stmt.run({ ":ts": ts, ":mid": milestoneId, ":sid": sliceId });
}
|
||||
|
||||
/**
 * Delete a slice and everything hanging off it, inside one transaction.
 *
 * Deletion order matters (manual cascade): verification evidence first,
 * then tasks, then dependency rows in both directions (as dependent and as
 * dependency of others), and finally the slice row itself.
 *
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteSlice(milestoneId, sliceId) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  transaction(() => {
    // Cascade-style manual deletion: evidence → tasks → dependencies → slice
    currentDb
      .prepare(
        `DELETE FROM verification_evidence WHERE milestone_id = :mid AND slice_id = :sid`,
      )
      .run({ ":mid": milestoneId, ":sid": sliceId });
    currentDb
      .prepare(
        `DELETE FROM tasks WHERE milestone_id = :mid AND slice_id = :sid`,
      )
      .run({ ":mid": milestoneId, ":sid": sliceId });
    // Rows where this slice is the dependent…
    currentDb
      .prepare(
        `DELETE FROM slice_dependencies WHERE milestone_id = :mid AND slice_id = :sid`,
      )
      .run({ ":mid": milestoneId, ":sid": sliceId });
    // …and rows where other slices depend on it.
    currentDb
      .prepare(
        `DELETE FROM slice_dependencies WHERE milestone_id = :mid AND depends_on_slice_id = :sid`,
      )
      .run({ ":mid": milestoneId, ":sid": sliceId });
    currentDb
      .prepare(`DELETE FROM slices WHERE milestone_id = :mid AND id = :sid`)
      .run({ ":mid": milestoneId, ":sid": sliceId });
  });
}
|
||||
|
||||
163
src/resources/extensions/sf/sf-db/sf-db-spec.js
Normal file
163
src/resources/extensions/sf/sf-db/sf-db-spec.js
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
import { _getAdapter } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
|
||||
/**
 * Fetch the spec row for a milestone.
 * @returns {object|null|undefined} the row, or null when no DB is open.
 */
export function getMilestoneSpec(milestoneId) {
  const db = _getAdapter();
  if (!db) return null;
  const stmt = db.prepare("SELECT * FROM milestone_specs WHERE id = ?");
  return stmt.get(milestoneId);
}
|
||||
|
||||
/**
 * Fetch the spec row for a slice.
 * @returns {object|null|undefined} the row, or null when no DB is open.
 */
export function getSliceSpec(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const stmt = db.prepare(
    "SELECT * FROM slice_specs WHERE milestone_id = ? AND slice_id = ?",
  );
  return stmt.get(milestoneId, sliceId);
}
|
||||
|
||||
/**
 * Fetch the spec row for a task.
 * @returns {object|null|undefined} the row, or null when no DB is open.
 */
export function getTaskSpec(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) return null;
  const stmt = db.prepare(
    "SELECT * FROM task_specs WHERE milestone_id = ? AND slice_id = ? AND task_id = ?",
  );
  return stmt.get(milestoneId, sliceId, taskId);
}
|
||||
|
||||
/**
 * Create a new validation run in the 'running' state.
 * @param {{milestoneId:string, sliceId?:string, taskId?:string, contract?:string}} args
 * @returns {string} the freshly generated run ID (UUID).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function startValidationRun({ milestoneId, sliceId, taskId, contract }) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const runId = crypto.randomUUID();
  const sql = `INSERT INTO validation_runs
      (run_id, milestone_id, slice_id, task_id, contract, status, started_at, created_at)
      VALUES (:run_id, :milestone_id, :slice_id, :task_id, :contract, 'running', datetime('now'), datetime('now'))`;
  db.prepare(sql).run({
    ":run_id": runId,
    ":milestone_id": milestoneId,
    ":slice_id": sliceId ?? null,
    ":task_id": taskId ?? null,
    ":contract": contract ?? "",
  });
  return runId;
}
|
||||
|
||||
/**
 * Transition a 'running' validation run to its terminal state.
 *
 * Maps verdict → status: 'pass' → 'pass', 'fail' → 'fail', anything else
 * (including undefined) → 'error'. The UPDATE is guarded by
 * `status = 'running'`, so completing an already-finished (or unknown) run
 * matches zero rows and raises SF_STALE_STATE.
 *
 * @param {{runId:string, verdict:string, rationale?:string, findings?:string}} args
 * @throws {SFError} SF_STALE_STATE when no database is open or no running
 *   run exists for runId.
 */
export function completeValidationRun({
  runId,
  verdict,
  rationale = "",
  findings = "",
}) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const status =
    verdict === "pass" ? "pass" : verdict === "fail" ? "fail" : "error";
  const result = currentDb
    .prepare(
      `UPDATE validation_runs SET
        status = :status,
        verdict = :verdict,
        rationale = :rationale,
        findings = :findings,
        completed_at = datetime('now')
      WHERE run_id = :run_id AND status = 'running'`,
    )
    .run({
      ":run_id": runId,
      ":status": status,
      ":verdict": verdict ?? "",
      ":rationale": rationale ?? "",
      ":findings": findings ?? "",
    });
  // Zero affected rows means the run was never started or already completed.
  if (result.changes === 0) {
    throw new SFError(
      SF_STALE_STATE,
      `sf-db: completeValidationRun: no running validation run found for run_id=${runId}`,
    );
  }
}
|
||||
|
||||
/**
 * Return the most recent validation run for the exact (milestone, slice,
 * task) triple, using IS so that null slice/task IDs match null columns.
 * @returns {object|null} latest run row, or null.
 */
export function getLatestValidationState(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) return null;
  const sql = `SELECT * FROM validation_runs
      WHERE milestone_id = :milestone_id
        AND slice_id IS :slice_id
        AND task_id IS :task_id
      ORDER BY created_at DESC, run_id DESC
      LIMIT 1`;
  const params = {
    ":milestone_id": milestoneId,
    ":slice_id": sliceId ?? null,
    ":task_id": taskId ?? null,
  };
  const [latest] = db.prepare(sql).all(params);
  return latest ?? null;
}
|
||||
|
||||
/**
 * Return recent validation runs (newest first) for the exact
 * (milestone, slice, task) triple; IS matches null slice/task IDs.
 * @param {number} [limit=20] maximum rows returned.
 * @returns {object[]} empty when no DB is open.
 */
export function getValidationHistory(milestoneId, sliceId, taskId, limit = 20) {
  const db = _getAdapter();
  if (!db) return [];
  const sql = `SELECT * FROM validation_runs
      WHERE milestone_id = :milestone_id
        AND slice_id IS :slice_id
        AND task_id IS :task_id
      ORDER BY created_at DESC, run_id DESC
      LIMIT :limit`;
  return db.prepare(sql).all({
    ":milestone_id": milestoneId,
    ":slice_id": sliceId ?? null,
    ":task_id": taskId ?? null,
    ":limit": limit,
  });
}
|
||||
|
||||
/**
 * Fetch the attention marker for a milestone, if any.
 * @returns {object|null} the marker row, or null (also when no DB is open).
 */
export function getValidationAttentionMarker(milestoneId) {
  const db = _getAdapter();
  if (!db) return null;
  const stmt = db.prepare(
    "SELECT * FROM validation_attention_markers WHERE milestone_id = ?",
  );
  const marker = stmt.get(milestoneId);
  return marker ?? null;
}
|
||||
|
||||
/**
 * Insert or replace the single attention marker for a milestone.
 *
 * On conflict (one marker per milestone) every field except created_at is
 * overwritten from the new values; created_at keeps its original value.
 *
 * @param {string} milestoneId
 * @param {{createdAt?:string, source?:string, remediationRound?:number,
 *          revalidationRound?:number, revalidationRequestedAt?:string}} marker
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertValidationAttentionMarker(milestoneId, marker) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const now = new Date().toISOString();
  currentDb
    .prepare(
      `INSERT INTO validation_attention_markers
        (milestone_id, created_at, source, remediation_round, revalidation_round, revalidation_requested_at)
        VALUES (:milestone_id, :created_at, :source, :remediation_round, :revalidation_round, :revalidation_requested_at)
      ON CONFLICT(milestone_id) DO UPDATE SET
        source = excluded.source,
        remediation_round = excluded.remediation_round,
        revalidation_round = excluded.revalidation_round,
        revalidation_requested_at = excluded.revalidation_requested_at`,
    )
    .run({
      ":milestone_id": milestoneId,
      // Caller-supplied creation time wins; otherwise stamp with "now".
      ":created_at": marker.createdAt ?? now,
      ":source": marker.source ?? null,
      ":remediation_round": marker.remediationRound ?? null,
      ":revalidation_round": marker.revalidationRound ?? null,
      ":revalidation_requested_at": marker.revalidationRequestedAt ?? null,
    });
}
|
||||
|
||||
455
src/resources/extensions/sf/sf-db/sf-db-tasks.js
Normal file
455
src/resources/extensions/sf/sf-db/sf-db-tasks.js
Normal file
|
|
@ -0,0 +1,455 @@
|
|||
import { _getAdapter, hasTaskSpecIntent, insertTaskSpecIfAbsent, rowToTask, safeParseJsonArray, transaction } from './sf-db-core.js';
|
||||
import { SF_STALE_STATE, SFError } from '../errors.js';
|
||||
import { normalizeSchedulerStatus, normalizeTaskStatus, taskFrontmatterFromRecord, withTaskFrontmatter } from '../task-frontmatter.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Insert a task record, or merge into an existing one on
 * (milestone_id, slice_id, id) conflict.
 *
 * Merge semantics on conflict:
 *  - title/description/estimate/verify/observability_impact: only overwrite
 *    when the incoming value is non-empty (NULLIF guard vs '').
 *  - files/inputs/expected_output: only overwrite when the incoming JSON is
 *    not the empty array literal '[]'.
 *  - All remaining columns (status, narrative, evidence fields, …) are
 *    overwritten unconditionally.
 *
 * Side effects after the upsert:
 *  - Records a task spec when the planning payload carries spec intent.
 *  - Seeds the scheduler status to 'queued' if none exists yet.
 *
 * @param {object} t task record (camelCase fields; see bindings below).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertTask(t) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  currentDb
    .prepare(`INSERT INTO tasks (
      milestone_id, slice_id, id, title, status, task_status, one_liner, narrative,
      verification_result, verification_status, duration, completed_at, blocker_discovered,
      deviations, known_issues, key_files, key_decisions, full_summary_md,
      description, estimate, files, verify, inputs, expected_output, observability_impact, sequence
    ) VALUES (
      :milestone_id, :slice_id, :id, :title, :status, :task_status, :one_liner, :narrative,
      :verification_result, :verification_status, :duration, :completed_at, :blocker_discovered,
      :deviations, :known_issues, :key_files, :key_decisions, :full_summary_md,
      :description, :estimate, :files, :verify, :inputs, :expected_output, :observability_impact, :sequence
    )
    ON CONFLICT(milestone_id, slice_id, id) DO UPDATE SET
      title = CASE WHEN NULLIF(:title, '') IS NOT NULL THEN :title ELSE tasks.title END,
      status = :status,
      task_status = :task_status,
      one_liner = :one_liner,
      narrative = :narrative,
      verification_result = :verification_result,
      verification_status = :verification_status,
      duration = :duration,
      completed_at = :completed_at,
      blocker_discovered = :blocker_discovered,
      deviations = :deviations,
      known_issues = :known_issues,
      key_files = :key_files,
      key_decisions = :key_decisions,
      full_summary_md = :full_summary_md,
      description = CASE WHEN NULLIF(:description, '') IS NOT NULL THEN :description ELSE tasks.description END,
      estimate = CASE WHEN NULLIF(:estimate, '') IS NOT NULL THEN :estimate ELSE tasks.estimate END,
      files = CASE WHEN NULLIF(:files, '[]') IS NOT NULL THEN :files ELSE tasks.files END,
      verify = CASE WHEN NULLIF(:verify, '') IS NOT NULL THEN :verify ELSE tasks.verify END,
      inputs = CASE WHEN NULLIF(:inputs, '[]') IS NOT NULL THEN :inputs ELSE tasks.inputs END,
      expected_output = CASE WHEN NULLIF(:expected_output, '[]') IS NOT NULL THEN :expected_output ELSE tasks.expected_output END,
      observability_impact = CASE WHEN NULLIF(:observability_impact, '') IS NOT NULL THEN :observability_impact ELSE tasks.observability_impact END,
      sequence = :sequence`)
    .run({
      ":milestone_id": t.milestoneId,
      ":slice_id": t.sliceId,
      ":id": t.id,
      ":title": t.title ?? "",
      ":status": t.status ?? "pending",
      // taskStatus falls back to status, then is normalized to the canonical set.
      ":task_status": normalizeTaskStatus(t.taskStatus ?? t.status) ?? "todo",
      ":one_liner": t.oneLiner ?? "",
      ":narrative": t.narrative ?? "",
      ":verification_result": t.verificationResult ?? "",
      ":verification_status": t.verificationStatus ?? "",
      ":duration": t.duration ?? "",
      // Stamp completion time only when the task arrives already finished.
      ":completed_at":
        t.status === "done" || t.status === "complete"
          ? new Date().toISOString()
          : null,
      ":blocker_discovered": t.blockerDiscovered ? 1 : 0,
      ":deviations": t.deviations ?? "",
      ":known_issues": t.knownIssues ?? "",
      ":key_files": JSON.stringify(t.keyFiles ?? []),
      ":key_decisions": JSON.stringify(t.keyDecisions ?? []),
      ":full_summary_md": t.fullSummaryMd ?? "",
      ":description": t.planning?.description ?? "",
      ":estimate": t.planning?.estimate ?? "",
      ":files": JSON.stringify(t.planning?.files ?? []),
      ":verify": t.planning?.verify ?? "",
      ":inputs": JSON.stringify(t.planning?.inputs ?? []),
      ":expected_output": JSON.stringify(t.planning?.expectedOutput ?? []),
      ":observability_impact": t.planning?.observabilityImpact ?? "",
      ":sequence": t.sequence ?? 0,
    });
  if (hasTaskSpecIntent(t.planning)) {
    insertTaskSpecIfAbsent(t.milestoneId, t.sliceId, t.id, t.planning ?? {});
  }
  // Seed scheduler state without clobbering an existing entry.
  upsertTaskSchedulerStatus(t.milestoneId, t.sliceId, t.id, "queued", {
    onlyIfAbsent: true,
  });
}
|
||||
|
||||
/**
 * Create a minimal 'pending' task row; an existing row is left untouched
 * (INSERT OR IGNORE).
 * @param {{milestoneId:string, sliceId:string, taskId:string, title:string, createdAt:string}} args
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function insertOrIgnoreTask(args) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const sql = `INSERT OR IGNORE INTO tasks (milestone_id, slice_id, id, title, status, created_at)
      VALUES (:mid, :sid, :tid, :title, 'pending', :ts)`;
  const { milestoneId, sliceId, taskId, title, createdAt } = args;
  db.prepare(sql).run({
    ":mid": milestoneId,
    ":sid": sliceId,
    ":tid": taskId,
    ":title": title,
    ":ts": createdAt,
  });
}
|
||||
|
||||
/**
 * Set a task's scheduler status.
 *
 * Two modes:
 *  - default: full upsert — inserts the row or overwrites status/updated_at
 *    on conflict.
 *  - onlyIfAbsent: INSERT OR IGNORE — seeds the row only when none exists,
 *    never touching an existing status.
 *
 * The incoming status is normalized; unrecognized values fall back to
 * 'queued'.
 *
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertTaskSchedulerStatus(
  milestoneId,
  sliceId,
  taskId,
  status = "queued",
  { onlyIfAbsent = false } = {},
) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const schedulerStatus = normalizeSchedulerStatus(status) ?? "queued";
  // Pick the SQL variant: seed-only vs overwrite-on-conflict.
  const sql = onlyIfAbsent
    ? `INSERT OR IGNORE INTO task_scheduler (
        milestone_id, slice_id, task_id, status, updated_at
      ) VALUES (
        :milestone_id, :slice_id, :task_id, :status, :updated_at
      )`
    : `INSERT INTO task_scheduler (
        milestone_id, slice_id, task_id, status, updated_at
      ) VALUES (
        :milestone_id, :slice_id, :task_id, :status, :updated_at
      )
      ON CONFLICT(milestone_id, slice_id, task_id) DO UPDATE SET
        status = excluded.status,
        updated_at = excluded.updated_at`;
  currentDb.prepare(sql).run({
    ":milestone_id": milestoneId,
    ":slice_id": sliceId,
    ":task_id": taskId,
    ":status": schedulerStatus,
    ":updated_at": new Date().toISOString(),
  });
}
|
||||
|
||||
/**
 * Update a task's status, completion time, and normalized task_status.
 * A nullish completedAt clears the completed_at column.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function updateTaskStatus(
  milestoneId,
  sliceId,
  taskId,
  status,
  completedAt,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  // Keep the normalized column in lockstep with the raw status value.
  const taskStatus = normalizeTaskStatus(status) ?? "todo";
  const sql = `UPDATE tasks SET
      status = :status,
      completed_at = :completed_at,
      task_status = :task_status
    WHERE milestone_id = :milestone_id AND slice_id = :slice_id AND id = :id`;
  db.prepare(sql).run({
    ":status": status,
    ":completed_at": completedAt ?? null,
    ":task_status": taskStatus,
    ":milestone_id": milestoneId,
    ":slice_id": sliceId,
    ":id": taskId,
  });
}
|
||||
|
||||
/**
 * Mark a task's escalation as pending (clearing awaiting-review) and record
 * the path of the escalation artifact.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setTaskEscalationPending(
  milestoneId,
  sliceId,
  taskId,
  artifactPath,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const sql = `UPDATE tasks
      SET escalation_pending = 1,
          escalation_awaiting_review = 0,
          escalation_artifact_path = :path
    WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`;
  db.prepare(sql).run({
    ":path": artifactPath,
    ":mid": milestoneId,
    ":sid": sliceId,
    ":tid": taskId,
  });
}
|
||||
|
||||
/**
 * Move a task's escalation to awaiting-review (clearing pending) and record
 * the path of the escalation artifact.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setTaskEscalationAwaitingReview(
  milestoneId,
  sliceId,
  taskId,
  artifactPath,
) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const sql = `UPDATE tasks
      SET escalation_awaiting_review = 1,
          escalation_pending = 0,
          escalation_artifact_path = :path
    WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`;
  db.prepare(sql).run({
    ":path": artifactPath,
    ":mid": milestoneId,
    ":sid": sliceId,
    ":tid": taskId,
  });
}
|
||||
|
||||
/**
 * Reset both escalation flags on a task (artifact path is left intact).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function clearTaskEscalationFlags(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const sql = `UPDATE tasks
      SET escalation_pending = 0,
          escalation_awaiting_review = 0
    WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`;
  db.prepare(sql).run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId });
}
|
||||
|
||||
/**
 * Find the first task in a slice with a resolved escalation whose override
 * has not yet been applied (artifact present, both flags cleared,
 * override not claimed), in execution order.
 * @returns {{taskId:string, artifactPath:string}|null}
 */
export function findUnappliedEscalationOverride(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const sql = `SELECT id, escalation_artifact_path
      FROM tasks
    WHERE milestone_id = :mid
      AND slice_id = :sid
      AND escalation_artifact_path IS NOT NULL
      AND escalation_pending = 0
      AND escalation_awaiting_review = 0
      AND escalation_override_applied = 0
    ORDER BY sequence ASC, id ASC
    LIMIT 1`;
  const match = db.prepare(sql).get({ ":mid": milestoneId, ":sid": sliceId });
  if (!match?.escalation_artifact_path) return null;
  return { taskId: match.id, artifactPath: match.escalation_artifact_path };
}
|
||||
|
||||
/**
 * Atomically claim an escalation override for a task.
 * The `escalation_override_applied = 0` guard makes the claim exclusive:
 * only the first caller flips it.
 * @returns {boolean} true when this call performed the claim.
 */
export function claimEscalationOverride(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) return false;
  const sql = `UPDATE tasks
      SET escalation_override_applied = 1
    WHERE milestone_id = :mid
      AND slice_id = :sid
      AND id = :tid
      AND escalation_override_applied = 0`;
  const outcome = db
    .prepare(sql)
    .run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId });
  return (outcome?.changes ?? 0) > 0;
}
|
||||
|
||||
/**
 * Record whether a blocker was discovered for a task (stored as 0/1).
 * Silently does nothing when no DB is open.
 */
export function setTaskBlockerDiscovered(
  milestoneId,
  sliceId,
  taskId,
  discovered,
) {
  const db = _getAdapter();
  if (!db) return;
  const stmt = db.prepare(
    `UPDATE tasks SET blocker_discovered = :discovered WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`,
  );
  stmt.run({
    ":discovered": discovered ? 1 : 0,
    ":mid": milestoneId,
    ":sid": sliceId,
    ":tid": taskId,
  });
}
|
||||
|
||||
/**
 * Merge a planning payload into an existing task row.
 *
 * Flow:
 *  1. Record a task spec for the planning payload (insert-if-absent).
 *  2. Normalize the payload into frontmatter via taskFrontmatterFromRecord;
 *     validation errors are logged but do not abort the update.
 *  3. UPDATE the tasks row: free-text/plan columns use COALESCE (nullish
 *     inputs keep the existing value); frontmatter-derived columns are
 *     overwritten unconditionally; task_status is only touched when the
 *     payload explicitly carried a status field (:has_task_status guard).
 *  4. Sync scheduler state: an explicit schedulerStatus overwrites;
 *     otherwise the row is merely seeded to 'queued' if absent.
 *
 * @param {string} milestoneId
 * @param {string} sliceId
 * @param {string} taskId
 * @param {object} planning planning/frontmatter payload (camelCase or
 *   snake_case status keys are both honored).
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function upsertTaskPlanning(milestoneId, sliceId, taskId, planning) {
  const currentDb = _getAdapter();
  if (!currentDb) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  insertTaskSpecIfAbsent(milestoneId, sliceId, taskId, planning);
  const { normalized: frontmatter, errors: fmErrors } =
    taskFrontmatterFromRecord(planning);
  // Best-effort: log validation problems and continue with normalized values.
  if (fmErrors?.length)
    logWarning(
      "sf-db:upsertTaskPlanning",
      `frontmatter validation errors for ${milestoneId}/${sliceId}/${taskId}: ${fmErrors.join(", ")}`,
    );
  // Only overwrite task_status when the caller actually supplied one.
  const hasTaskStatus =
    planning.taskStatus !== undefined ||
    planning.task_status !== undefined ||
    planning.status !== undefined;
  currentDb
    .prepare(`UPDATE tasks SET
      title = COALESCE(:title, title),
      description = COALESCE(:description, description),
      estimate = COALESCE(:estimate, estimate),
      files = COALESCE(:files, files),
      verify = COALESCE(:verify, verify),
      inputs = COALESCE(:inputs, inputs),
      expected_output = COALESCE(:expected_output, expected_output),
      observability_impact = COALESCE(:observability_impact, observability_impact),
      full_plan_md = COALESCE(:full_plan_md, full_plan_md),
      risk = :risk,
      mutation_scope = :mutation_scope,
      verification_type = :verification_type,
      plan_approval = :plan_approval,
      task_status = CASE WHEN :has_task_status = 1 THEN :task_status ELSE task_status END,
      estimated_effort = :estimated_effort,
      dependencies = :dependencies,
      blocks_parallel = :blocks_parallel,
      requires_user_input = :requires_user_input,
      auto_retry = :auto_retry,
      max_retries = :max_retries,
      frontmatter_version = :frontmatter_version
    WHERE milestone_id = :milestone_id AND slice_id = :slice_id AND id = :id`)
    .run({
      ":milestone_id": milestoneId,
      ":slice_id": sliceId,
      ":id": taskId,
      ":title": planning.title ?? null,
      ":description": planning.description ?? null,
      ":estimate": planning.estimate ?? null,
      ":files": planning.files ? JSON.stringify(planning.files) : null,
      ":verify": planning.verify ?? null,
      ":inputs": planning.inputs ? JSON.stringify(planning.inputs) : null,
      ":expected_output": planning.expectedOutput
        ? JSON.stringify(planning.expectedOutput)
        : null,
      ":observability_impact": planning.observabilityImpact ?? null,
      ":full_plan_md": planning.fullPlanMd ?? null,
      ":risk": frontmatter.risk,
      ":mutation_scope": frontmatter.mutationScope,
      ":verification_type": frontmatter.verification,
      ":plan_approval": frontmatter.planApproval,
      ":task_status": frontmatter.taskStatus,
      ":has_task_status": hasTaskStatus ? 1 : 0,
      ":estimated_effort": frontmatter.estimatedEffort,
      ":dependencies": JSON.stringify(frontmatter.dependencies),
      ":blocks_parallel": frontmatter.blocksParallel ? 1 : 0,
      ":requires_user_input": frontmatter.requiresUserInput ? 1 : 0,
      ":auto_retry": frontmatter.autoRetry ? 1 : 0,
      ":max_retries": frontmatter.maxRetries,
      ":frontmatter_version": frontmatter.frontmatterVersion,
    });
  if (
    planning.schedulerStatus !== undefined ||
    planning.scheduler_status !== undefined
  ) {
    // Explicit scheduler status supplied — apply the normalized value.
    upsertTaskSchedulerStatus(
      milestoneId,
      sliceId,
      taskId,
      frontmatter.schedulerStatus,
    );
  } else {
    // Nothing supplied — seed 'queued' only if no scheduler row exists yet.
    upsertTaskSchedulerStatus(milestoneId, sliceId, taskId, "queued", {
      onlyIfAbsent: true,
    });
  }
}
|
||||
|
||||
/**
 * Load one task (joined with its scheduler status) and convert it to a
 * task record.
 * @returns {object|null} rowToTask result, or null when absent / no DB.
 */
export function getTask(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) return null;
  const sql = `SELECT t.*, ts.status AS scheduler_status
      FROM tasks t
      LEFT JOIN task_scheduler ts
        ON t.milestone_id = ts.milestone_id
        AND t.slice_id = ts.slice_id
        AND t.id = ts.task_id
    WHERE t.milestone_id = :mid AND t.slice_id = :sid AND t.id = :tid`;
  const record = db
    .prepare(sql)
    .get({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId });
  return record ? rowToTask(record) : null;
}
|
||||
|
||||
/**
 * Store the full markdown summary for a task.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function setTaskSummaryMd(milestoneId, sliceId, taskId, md) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const stmt = db.prepare(
    `UPDATE tasks SET full_summary_md = :md WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`,
  );
  stmt.run({ ":mid": milestoneId, ":sid": sliceId, ":tid": taskId, ":md": md });
}
|
||||
|
||||
/**
 * Fetch the first unfinished task of a slice, in (sequence, id) order.
 * @returns {object|null} rowToTask result, or null when all tasks are done.
 */
export function getActiveTaskFromDb(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const active = db
    .prepare(
      "SELECT * FROM tasks WHERE milestone_id = :mid AND slice_id = :sid AND status NOT IN ('complete', 'done') ORDER BY sequence, id LIMIT 1",
    )
    .get({ ":mid": milestoneId, ":sid": sliceId });
  return active ? rowToTask(active) : null;
}
|
||||
|
||||
/**
 * Lightweight variant of getActiveTaskFromDb: only id/status/title of the
 * first unfinished task, without the full record conversion.
 * @returns {{id:string, status:string, title:string}|null}
 */
export function getActiveTaskIdFromDb(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return null;
  const active = db
    .prepare(
      "SELECT id, status, title FROM tasks WHERE milestone_id = :mid AND slice_id = :sid AND status NOT IN ('complete', 'done') ORDER BY sequence, id LIMIT 1",
    )
    .get({ ":mid": milestoneId, ":sid": sliceId });
  if (!active) return null;
  const { id, status, title } = active;
  return { id, status, title };
}
|
||||
|
||||
/**
 * Delete a task and its verification evidence atomically.
 * Evidence rows go first because they hold a foreign key on tasks.
 * @throws {SFError} SF_STALE_STATE when no database is open.
 */
export function deleteTask(milestoneId, sliceId, taskId) {
  const db = _getAdapter();
  if (!db) throw new SFError(SF_STALE_STATE, "sf-db: No database open");
  const key = { ":mid": milestoneId, ":sid": sliceId, ":tid": taskId };
  transaction(() => {
    // Must delete verification_evidence first (FK constraint)
    db.prepare(
      `DELETE FROM verification_evidence WHERE milestone_id = :mid AND slice_id = :sid AND task_id = :tid`,
    ).run(key);
    db.prepare(
      `DELETE FROM tasks WHERE milestone_id = :mid AND slice_id = :sid AND id = :tid`,
    ).run(key);
  });
}
|
||||
|
||||
/**
 * List tasks of a milestone that carry an escalation artifact.
 * By default only active escalations (pending or awaiting review) are
 * returned; includeResolved widens to any task with an artifact path.
 * The interpolated filter is one of two fixed literals — no user input.
 * @returns {object[]} rowToTask records, in (slice, sequence, id) order.
 */
export function listEscalationArtifacts(milestoneId, includeResolved = false) {
  const db = _getAdapter();
  if (!db) return [];
  const filter = includeResolved
    ? "escalation_artifact_path IS NOT NULL"
    : "(escalation_pending = 1 OR escalation_awaiting_review = 1) AND escalation_artifact_path IS NOT NULL";
  const stmt = db.prepare(
    `SELECT * FROM tasks WHERE milestone_id = :mid AND ${filter} ORDER BY slice_id, sequence, id`,
  );
  return stmt.all({ ":mid": milestoneId }).map(rowToTask);
}
|
||||
|
||||
/**
 * Load every task of a slice (joined with scheduler status), ordered by
 * (sequence, id), converted via rowToTask.
 * @returns {object[]} empty when no DB is open.
 */
export function getSliceTasks(milestoneId, sliceId) {
  const db = _getAdapter();
  if (!db) return [];
  const sql = `SELECT t.*, ts.status AS scheduler_status
      FROM tasks t
      LEFT JOIN task_scheduler ts
        ON t.milestone_id = ts.milestone_id
        AND t.slice_id = ts.slice_id
        AND t.id = ts.task_id
    WHERE t.milestone_id = :mid AND t.slice_id = :sid
    ORDER BY t.sequence, t.id`;
  const records = db.prepare(sql).all({ ":mid": milestoneId, ":sid": sliceId });
  return records.map(rowToTask);
}
|
||||
366
src/resources/extensions/sf/sf-db/sf-db-uok.js
Normal file
366
src/resources/extensions/sf/sf-db/sf-db-uok.js
Normal file
|
|
@ -0,0 +1,366 @@
|
|||
import { _getAdapter, capErrorForStorage, parseJsonObject, rowToUnitMetrics } from './sf-db-core.js';
|
||||
import { logWarning } from '../workflow-logger.js';
|
||||
import { readTraceEvents } from '../uok/trace-writer.js';
|
||||
|
||||
/**
 * Record the start of a UOK run.
 *
 * Upsert semantics: re-starting an existing run_id RESETS the row — status
 * back to 'started', ended_at/error cleared — so a restarted run looks
 * fresh. Silently does nothing when no DB is open.
 *
 * @param {{runId:string, sessionId?:string, path?:string, startedAt?:string,
 *          flags?:object}} entry
 */
export function recordUokRunStart(entry) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  // Caller-supplied start time wins; otherwise stamp with "now".
  const now = entry.startedAt ?? new Date().toISOString();
  currentDb
    .prepare(`INSERT INTO uok_runs (
      run_id, session_id, path, status, started_at, ended_at, error, flags_json, updated_at
    ) VALUES (
      :run_id, :session_id, :path, 'started', :started_at, NULL, NULL, :flags_json, :updated_at
    )
    ON CONFLICT(run_id) DO UPDATE SET
      session_id = excluded.session_id,
      path = excluded.path,
      status = 'started',
      started_at = excluded.started_at,
      ended_at = NULL,
      error = NULL,
      flags_json = excluded.flags_json,
      updated_at = excluded.updated_at`)
    .run({
      ":run_id": entry.runId,
      ":session_id": entry.sessionId ?? null,
      ":path": entry.path ?? "",
      ":started_at": now,
      ":flags_json": JSON.stringify(entry.flags ?? {}),
      ":updated_at": now,
    });
}
|
||||
|
||||
/**
 * Record the end of a UOK run.
 *
 * Upsert semantics on conflict preserve prior data where the exit entry is
 * uninformative: a null session_id, empty path, or empty ('{}') flags keep
 * the values written at start. The error text is capped via
 * capErrorForStorage before persisting. Silently does nothing when no DB is
 * open.
 *
 * @param {{runId:string, sessionId?:string, path?:string, status?:string,
 *          startedAt?:string, endedAt?:string, error?:string, flags?:object}} entry
 */
export function recordUokRunExit(entry) {
  const currentDb = _getAdapter();
  if (!currentDb) return;
  const now = entry.endedAt ?? new Date().toISOString();
  currentDb
    .prepare(`INSERT INTO uok_runs (
      run_id, session_id, path, status, started_at, ended_at, error, flags_json, updated_at
    ) VALUES (
      :run_id, :session_id, :path, :status, :started_at, :ended_at, :error, :flags_json, :updated_at
    )
    ON CONFLICT(run_id) DO UPDATE SET
      session_id = COALESCE(excluded.session_id, uok_runs.session_id),
      path = CASE WHEN excluded.path = '' THEN uok_runs.path ELSE excluded.path END,
      status = excluded.status,
      ended_at = excluded.ended_at,
      error = excluded.error,
      flags_json = CASE WHEN excluded.flags_json = '{}' THEN uok_runs.flags_json ELSE excluded.flags_json END,
      updated_at = excluded.updated_at`)
    .run({
      ":run_id": entry.runId,
      ":session_id": entry.sessionId ?? null,
      ":path": entry.path ?? "",
      ":status": entry.status ?? "ok",
      // If the insert path fires (no prior start row), fall back to "now".
      ":started_at": entry.startedAt ?? now,
      ":ended_at": now,
      // Cap oversized error text before it hits the DB.
      ":error": entry.error
        ? capErrorForStorage(entry.error, entry.runId)
        : null,
      ":flags_json": JSON.stringify(entry.flags ?? {}),
      ":updated_at": now,
    });
}
|
||||
|
||||
/**
 * List recent UOK runs, newest-started first, mapped to camelCase objects.
 * Corrupt flags_json degrades to {} rather than throwing.
 * @param {number} [limit=500] maximum rows returned.
 * @returns {object[]} empty when no DB is open.
 */
export function getUokRuns(limit = 500) {
  const db = _getAdapter();
  if (!db) return [];
  const parseFlags = (raw) => {
    try {
      return JSON.parse(raw || "{}");
    } catch {
      return {};
    }
  };
  const sql = `SELECT run_id, session_id, path, status, started_at, ended_at, error, flags_json, updated_at
      FROM uok_runs
    ORDER BY started_at DESC
    LIMIT :limit`;
  const rows = db.prepare(sql).all({ ":limit": limit });
  return rows.map((row) => ({
    runId: row.run_id,
    sessionId: row.session_id,
    path: row.path,
    status: row.status,
    startedAt: row.started_at,
    endedAt: row.ended_at,
    error: row.error,
    flags: parseFlags(row.flags_json),
    updatedAt: row.updated_at,
  }));
}
|
||||
|
||||
/**
 * Intentional no-op retained for backward compatibility: audit events are
 * now written exclusively to JSONL files, but existing callers still invoke
 * this hook with an event entry.
 *
 * @param {object} _entry - Ignored audit event payload.
 * @returns {void}
 */
export function insertAuditEvent(_entry) {
  // no-op: audit events now written exclusively to JSONL files
}
|
||||
|
||||
/**
 * Persist one agent-to-agent message in `uok_messages`.
 *
 * Uses INSERT OR IGNORE so replaying a message with an id that already
 * exists is harmless (idempotent insert).
 *
 * @param {object} msg - Message with `id`, `from`, `to`, optional `body`,
 *   `metadata`, `sentAt`, and optional `deliveredAt`.
 * @returns {void} No-op when no database is open.
 */
export function insertUokMessage(msg) {
  const db = _getAdapter();
  if (!db) return;
  // Bind values first so the statement call below stays readable.
  const bindings = {
    ":id": msg.id,
    ":from_agent": msg.from,
    ":to_agent": msg.to,
    ":body": msg.body ?? "",
    ":metadata_json": JSON.stringify(msg.metadata ?? {}),
    ":sent_at": msg.sentAt,
    ":delivered_at": msg.deliveredAt ?? null,
  };
  const insertStmt = db.prepare(
    `INSERT OR IGNORE INTO uok_messages (id, from_agent, to_agent, body, metadata_json, sent_at, delivered_at)
       VALUES (:id, :from_agent, :to_agent, :body, :metadata_json, :sent_at, :delivered_at)`,
  );
  insertStmt.run(bindings);
}
|
||||
|
||||
/**
 * Fetch messages addressed to an agent, oldest first, with a per-recipient
 * read flag joined in from `uok_message_reads`.
 *
 * @param {string} agentId - Recipient agent id.
 * @param {number} [limit=1000] - Max rows; clamped to 1..10_000.
 * @param {boolean} [unreadOnly=false] - When true, only messages the agent
 *   has not marked read.
 * @returns {Array<object>} Messages with parsed `metadata` and boolean
 *   `read`; `[]` when no database is open or the query fails.
 */
export function getUokMessagesForAgent(
  agentId,
  limit = 1000,
  unreadOnly = false,
) {
  const db = _getAdapter();
  if (!db) return [];
  try {
    // Assemble the query from fragments; the unread filter is optional.
    const fragments = [
      `SELECT m.id, m.from_agent AS "from", m.to_agent AS "to", m.body, m.metadata_json AS metadataJson, m.sent_at AS sentAt, m.delivered_at AS deliveredAt,
       CASE WHEN r.agent_id IS NOT NULL THEN 1 ELSE 0 END AS read
       FROM uok_messages m
       LEFT JOIN uok_message_reads r ON r.message_id = m.id AND r.agent_id = :agent_id
       WHERE m.to_agent = :agent_id`,
    ];
    if (unreadOnly) {
      fragments.push(" AND r.agent_id IS NULL");
    }
    fragments.push(" ORDER BY m.sent_at ASC LIMIT :limit");
    // Clamp limit so bad caller input cannot yield an invalid/unbounded LIMIT.
    const boundedLimit = Math.max(1, Math.min(10_000, Number(limit) || 1000));
    const rows = db.prepare(fragments.join("")).all({
      ":agent_id": agentId,
      ":limit": boundedLimit,
    });
    const messages = [];
    for (const row of rows) {
      messages.push({
        id: row.id,
        from: row.from,
        to: row.to,
        body: row.body,
        metadata: parseJsonObject(row.metadataJson, {}),
        sentAt: row.sentAt,
        deliveredAt: row.deliveredAt,
        read: !!row.read,
      });
    }
    return messages;
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Fetch the two-way conversation between a pair of agents, newest first.
 *
 * @param {string} agentA - One participant.
 * @param {string} agentB - The other participant.
 * @param {number} [limit=1000] - Maximum rows to return.
 * @returns {Array<object>} Messages (either direction) with parsed
 *   `metadata`; `[]` when no database is open or the query fails.
 */
export function getUokConversation(agentA, agentB, limit = 1000) {
  const db = _getAdapter();
  if (!db) return [];
  try {
    const stmt = db.prepare(
      `SELECT id, from_agent AS "from", to_agent AS "to", body, metadata_json AS metadataJson, sent_at AS sentAt, delivered_at AS deliveredAt
       FROM uok_messages
       WHERE (from_agent = :a AND to_agent = :b) OR (from_agent = :b AND to_agent = :a)
       ORDER BY sent_at DESC
       LIMIT :limit`,
    );
    const conversation = [];
    for (const row of stmt.all({ ":a": agentA, ":b": agentB, ":limit": limit })) {
      conversation.push({
        id: row.id,
        from: row.from,
        to: row.to,
        body: row.body,
        metadata: parseJsonObject(row.metadataJson, {}),
        sentAt: row.sentAt,
        deliveredAt: row.deliveredAt,
      });
    }
    return conversation;
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Mark one message as read by one agent.
 *
 * Idempotent: INSERT OR IGNORE means re-marking an already-read message
 * succeeds without creating a duplicate row.
 *
 * @param {string} messageId - Id of the message being acknowledged.
 * @param {string} agentId - Agent that read the message.
 * @returns {boolean} true on success (including the already-read case),
 *   false when no database is open or the write fails.
 */
export function markUokMessageRead(messageId, agentId) {
  const db = _getAdapter();
  if (!db) return false;
  const readAt = new Date().toISOString();
  try {
    const stmt = db.prepare(
      `INSERT OR IGNORE INTO uok_message_reads (message_id, agent_id, read_at) VALUES (:message_id, :agent_id, :read_at)`,
    );
    stmt.run({
      ":message_id": messageId,
      ":agent_id": agentId,
      ":read_at": readAt,
    });
    return true;
  } catch {
    return false;
  }
}
|
||||
|
||||
/**
 * Count messages addressed to an agent that the agent has not marked read.
 *
 * A message is "unread" while no matching row exists in uok_message_reads
 * for this recipient.
 *
 * @param {string} agentId - Recipient agent id.
 * @returns {number} Unread count; 0 when no database is open or the query
 *   fails.
 */
export function getUokMessageUnreadCount(agentId) {
  const db = _getAdapter();
  if (!db) return 0;
  try {
    const result = db
      .prepare(
        `SELECT COUNT(*) AS cnt FROM uok_messages m
       WHERE m.to_agent = :agent_id
       AND NOT EXISTS (
         SELECT 1 FROM uok_message_reads r
         WHERE r.message_id = m.id AND r.agent_id = :agent_id
       )`,
      )
      .get({ ":agent_id": agentId });
    if (!result || result.cnt == null) return 0;
    return result.cnt;
  } catch {
    return 0;
  }
}
|
||||
|
||||
/**
 * Delete messages older than the retention window.
 *
 * @param {number} retentionDays - Age threshold in days; rows with
 *   `sent_at` earlier than now minus this many days are removed.
 * @returns {{before: number, after: number}} Row counts taken before and
 *   after the delete; `{before: 0, after: 0}` when no database is open or
 *   any step fails.
 */
export function compactUokMessages(retentionDays) {
  const db = _getAdapter();
  if (!db) return { before: 0, after: 0 };
  try {
    // Helper so the same COUNT query serves both snapshots.
    const countMessages = () =>
      db.prepare("SELECT COUNT(*) AS cnt FROM uok_messages").get()?.cnt ?? 0;
    const cutoffMs = Date.now() - retentionDays * 24 * 60 * 60 * 1000;
    const cutoff = new Date(cutoffMs).toISOString();
    const before = countMessages();
    db.prepare("DELETE FROM uok_messages WHERE sent_at < :cutoff").run({
      ":cutoff": cutoff,
    });
    const after = countMessages();
    return { before, after };
  } catch {
    return { before: 0, after: 0 };
  }
}
|
||||
|
||||
/**
 * List ids of all messages a given agent has marked read.
 *
 * @param {string} agentId - Agent whose read receipts to fetch.
 * @returns {Array<string>} Message ids; `[]` when no database is open or
 *   the query fails.
 */
export function getUokMessageReadIds(agentId) {
  const db = _getAdapter();
  if (!db) return [];
  try {
    const stmt = db.prepare(
      "SELECT message_id FROM uok_message_reads WHERE agent_id = :agent_id",
    );
    const ids = [];
    for (const row of stmt.all({ ":agent_id": agentId })) {
      ids.push(row.message_id);
    }
    return ids;
  } catch {
    return [];
  }
}
|
||||
|
||||
/**
 * Aggregate counters for the UOK message bus.
 *
 * @returns {{totalMessages: number, totalUnread: number,
 *   uniqueAgents: number, uniqueConversations: number}} All-zero metrics
 *   when no database is open or any query fails.
 */
export function getUokMessageBusMetrics() {
  // Single source of truth for the all-zero result. Previously this literal
  // was duplicated three times (no-db guard and catch), which risks drift
  // whenever a new metric is added.
  const EMPTY_METRICS = {
    totalMessages: 0,
    totalUnread: 0,
    uniqueAgents: 0,
    uniqueConversations: 0,
  };
  const currentDb = _getAdapter();
  if (!currentDb) {
    return { ...EMPTY_METRICS };
  }
  try {
    const totalRow = currentDb
      .prepare("SELECT COUNT(*) AS cnt FROM uok_messages")
      .get();
    // Unread = recipient has no matching row in uok_message_reads.
    const unreadRow = currentDb
      .prepare(
        `SELECT COUNT(*) AS cnt FROM uok_messages m
       WHERE NOT EXISTS (
         SELECT 1 FROM uok_message_reads r
         WHERE r.message_id = m.id
           AND r.agent_id = m.to_agent
       )`,
      )
      .get();
    const agentsRow = currentDb
      .prepare(`SELECT COUNT(DISTINCT to_agent) AS cnt FROM uok_messages`)
      .get();
    // NOTE(review): pairs are directed — A->B and B->A count as two distinct
    // "conversations". Confirm that is intended before changing it.
    const convRow = currentDb
      .prepare(
        `SELECT COUNT(DISTINCT from_agent || ':' || to_agent) AS cnt FROM uok_messages`,
      )
      .get();
    return {
      totalMessages: totalRow?.cnt ?? 0,
      totalUnread: unreadRow?.cnt ?? 0,
      uniqueAgents: agentsRow?.cnt ?? 0,
      uniqueConversations: convRow?.cnt ?? 0,
    };
  } catch {
    // Read failures degrade to zeros, matching the module's getter contract.
    return { ...EMPTY_METRICS };
  }
}
|
||||
|
||||
/**
 * Insert or overwrite a `unit_metrics` row for one unit of work.
 *
 * @param {object} db - Database adapter exposing `prepare(sql).run(params)`.
 * @param {object} unit - Unit metrics: `type`, `id`, timing, `model`, a
 *   `tokens` breakdown, cost/counters, and optional tiering, context-window,
 *   truncation, prompt-size, cache and skills fields (stored as NULL when
 *   absent).
 * @returns {void}
 */
export function upsertUnitMetrics(db, unit) {
  // Tri-state boolean -> SQLite: true -> 1, false -> 0, null/undefined -> NULL.
  const toFlag = (value) => (value != null ? (value ? 1 : 0) : null);
  const bindings = {
    ":type": unit.type,
    ":id": unit.id,
    ":started_at": unit.startedAt,
    ":finished_at": unit.finishedAt,
    ":model": unit.model,
    ":auto_session_key": unit.autoSessionKey ?? null,
    ":tokens_input": unit.tokens.input,
    ":tokens_output": unit.tokens.output,
    ":tokens_cache_read": unit.tokens.cacheRead,
    ":tokens_cache_write": unit.tokens.cacheWrite,
    ":tokens_total": unit.tokens.total,
    ":cost": unit.cost,
    ":tool_calls": unit.toolCalls,
    ":assistant_messages": unit.assistantMessages,
    ":user_messages": unit.userMessages,
    // Older units lack apiRequests; assistant message count is the proxy.
    ":api_requests": unit.apiRequests ?? unit.assistantMessages,
    ":tier": unit.tier ?? null,
    ":model_downgraded": toFlag(unit.modelDowngraded),
    ":context_window_tokens": unit.contextWindowTokens ?? null,
    ":truncation_sections": unit.truncationSections ?? null,
    ":continue_here_fired": toFlag(unit.continueHereFired),
    ":prompt_char_count": unit.promptCharCount ?? null,
    ":baseline_char_count": unit.baselineCharCount ?? null,
    ":cache_hit_rate": unit.cacheHitRate ?? null,
    ":skills": unit.skills != null ? JSON.stringify(unit.skills) : null,
  };
  const stmt = db.prepare(
    `INSERT OR REPLACE INTO unit_metrics (
      type, id, started_at, finished_at, model, auto_session_key,
      tokens_input, tokens_output, tokens_cache_read, tokens_cache_write, tokens_total,
      cost, tool_calls, assistant_messages, user_messages, api_requests,
      tier, model_downgraded, context_window_tokens, truncation_sections,
      continue_here_fired, prompt_char_count, baseline_char_count, cache_hit_rate, skills
    ) VALUES (
      :type, :id, :started_at, :finished_at, :model, :auto_session_key,
      :tokens_input, :tokens_output, :tokens_cache_read, :tokens_cache_write, :tokens_total,
      :cost, :tool_calls, :assistant_messages, :user_messages, :api_requests,
      :tier, :model_downgraded, :context_window_tokens, :truncation_sections,
      :continue_here_fired, :prompt_char_count, :baseline_char_count, :cache_hit_rate, :skills
    )`,
  );
  stmt.run(bindings);
}
|
||||
|
||||
/**
 * Load every unit_metrics row, oldest first, mapped to domain objects.
 *
 * @param {object} db - Database adapter exposing `prepare(sql).all()`.
 * @returns {Array<object>} All unit metrics in chronological start order.
 */
export function getAllUnitMetrics(db) {
  const stmt = db.prepare(
    "SELECT * FROM unit_metrics ORDER BY started_at ASC",
  );
  const rows = stmt.all();
  return rows.map(rowToUnitMetrics);
}
|
||||
|
||||
/**
 * Trim unit_metrics to the most recent `keepCount` rows (by finished_at),
 * deleting everything older.
 *
 * @param {object} db - Database adapter exposing `prepare(sql).run(params)`.
 * @param {number} keepCount - How many of the newest rows to retain.
 * @returns {void}
 */
export function pruneUnitMetrics(db, keepCount) {
  const pruneSql = `DELETE FROM unit_metrics WHERE rowid NOT IN (
    SELECT rowid FROM unit_metrics ORDER BY finished_at DESC LIMIT :keepCount
  )`;
  db.prepare(pruneSql).run({ ":keepCount": keepCount });
}
|
||||
265
src/resources/extensions/sf/sf-db/sf-db-worktree.js
Normal file
265
src/resources/extensions/sf/sf-db/sf-db-worktree.js
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
import { _getAdapter, openDatabase } from './sf-db-core.js';
|
||||
import { copyFileSync, existsSync, mkdirSync, realpathSync } from 'node:fs';
|
||||
import { dirname } from 'node:path';
|
||||
import { logError, logWarning } from '../workflow-logger.js';
|
||||
|
||||
/**
 * Copy the project database file to a worktree location, creating the
 * destination directory as needed.
 *
 * @param {string} srcDbPath - Path of the source database file.
 * @param {string} destDbPath - Path the copy should be written to.
 * @returns {boolean} true when the copy succeeded; false when the source
 *   does not exist or any filesystem operation failed (failure is logged).
 */
export function copyWorktreeDb(srcDbPath, destDbPath) {
  try {
    // Nothing to do when there is no source database yet.
    if (!existsSync(srcDbPath)) return false;
    mkdirSync(dirname(destDbPath), { recursive: true });
    copyFileSync(srcDbPath, destDbPath);
    return true;
  } catch (err) {
    logError("db", "failed to copy DB to worktree", {
      error: err.message,
    });
    return false;
  }
}
|
||||
|
||||
/**
 * Merge a worktree copy of the database back into the main database.
 *
 * Strategy: ATTACH the worktree DB as `wt`, detect conflicting edits in
 * decisions/requirements (reported, not resolved), then merge all tables in
 * one transaction. Slices and tasks use a status-preservation CASE so a
 * stale worktree row never downgrades a completed record (#2558);
 * verification evidence is append-only via INSERT OR IGNORE. The worktree
 * DB is always DETACHed, even on failure.
 *
 * @param {string} mainDbPath - Path of the main database (opened if needed).
 * @param {string} worktreeDbPath - Path of the worktree database to merge.
 * @returns {{decisions: number, requirements: number, artifacts: number,
 *   milestones: number, slices: number, tasks: number, memories: number,
 *   verification_evidence: number, conflicts: string[]}} Per-table merged
 *   row counts plus human-readable conflict descriptions; all-zero result
 *   when there is nothing to merge or reconciliation fails.
 */
export function reconcileWorktreeDb(mainDbPath, worktreeDbPath) {
  // Canonical "nothing merged" result, reused by every early-out path.
  const zero = {
    decisions: 0,
    requirements: 0,
    artifacts: 0,
    milestones: 0,
    slices: 0,
    tasks: 0,
    memories: 0,
    verification_evidence: 0,
    conflicts: [],
  };
  if (!existsSync(worktreeDbPath)) return zero;
  // Guard: bail when both paths resolve to the same physical file.
  // ATTACHing a WAL-mode DB to itself corrupts the WAL (#2823).
  try {
    if (realpathSync(mainDbPath) === realpathSync(worktreeDbPath)) return zero;
  } catch (e) {
    logWarning("db", `realpathSync failed: ${e.message}`);
  }
  // Sanitize path: reject any characters that could break ATTACH syntax.
  // ATTACH DATABASE doesn't support parameterized paths in all providers,
  // so we use strict allowlist validation instead.
  if (/['";\x00]/.test(worktreeDbPath)) {
    logError(
      "db",
      "worktree DB reconciliation failed: path contains unsafe characters",
    );
    return zero;
  }
  // Lazily open the main DB when no adapter is active yet.
  if (!_getAdapter()) {
    const opened = openDatabase(mainDbPath);
    if (!opened) {
      logError("db", "worktree DB reconciliation failed: cannot open main DB");
      return zero;
    }
  }
  const adapter = _getAdapter();
  const conflicts = [];
  try {
    adapter.exec(`ATTACH DATABASE '${worktreeDbPath}' AS wt`);
    try {
      // Schema probes: the worktree copy may predate newer columns
      // (decisions.made_by, milestones.product_research_json); the merge SQL
      // below substitutes defaults when a column is absent.
      const wtInfo = adapter.prepare("PRAGMA wt.table_info('decisions')").all();
      const hasMadeBy = wtInfo.some((col) => col["name"] === "made_by");
      const wtMilestoneInfo = adapter
        .prepare("PRAGMA wt.table_info('milestones')")
        .all();
      const hasProductResearch = wtMilestoneInfo.some(
        (col) => col["name"] === "product_research_json",
      );
      // Conflict detection: rows present in both DBs whose tracked fields
      // differ. These are reported to the caller, not auto-resolved.
      const decConf = adapter
        .prepare(
          `SELECT m.id FROM decisions m INNER JOIN wt.decisions w ON m.id = w.id WHERE m.decision != w.decision OR m.choice != w.choice OR m.rationale != w.rationale OR ${hasMadeBy ? "m.made_by != w.made_by" : "'agent' != 'agent'"} OR m.superseded_by IS NOT w.superseded_by`,
        )
        .all();
      for (const row of decConf)
        conflicts.push(`decision ${row["id"]}: modified in both`);
      const reqConf = adapter
        .prepare(
          `SELECT m.id FROM requirements m INNER JOIN wt.requirements w ON m.id = w.id WHERE m.description != w.description OR m.status != w.status OR m.notes != w.notes OR m.superseded_by IS NOT w.superseded_by`,
        )
        .all();
      for (const row of reqConf)
        conflicts.push(`requirement ${row["id"]}: modified in both`);
      // Per-table counts of rows written by the merge.
      const merged = {
        decisions: 0,
        requirements: 0,
        artifacts: 0,
        milestones: 0,
        slices: 0,
        tasks: 0,
        memories: 0,
        verification_evidence: 0,
      };
      // Adapter .run() results vary by provider; read .changes defensively.
      function countChanges(result) {
        return typeof result === "object" && result !== null
          ? (result.changes ?? 0)
          : 0;
      }
      // All table merges run in a single transaction so a failure part-way
      // through leaves the main DB untouched.
      adapter.exec("BEGIN");
      try {
        merged.decisions = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO decisions (
            id, when_context, scope, decision, choice, rationale, revisable, made_by, superseded_by
          )
          SELECT id, when_context, scope, decision, choice, rationale, revisable, ${hasMadeBy ? "made_by" : "'agent'"}, superseded_by FROM wt.decisions
        `)
            .run(),
        );
        merged.requirements = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO requirements (
            id, class, status, description, why, source, primary_owner,
            supporting_slices, validation, notes, full_content, superseded_by
          )
          SELECT id, class, status, description, why, source, primary_owner,
                 supporting_slices, validation, notes, full_content, superseded_by
          FROM wt.requirements
        `)
            .run(),
        );
        merged.artifacts = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO artifacts (
            path, artifact_type, milestone_id, slice_id, task_id, full_content, imported_at
          )
          SELECT path, artifact_type, milestone_id, slice_id, task_id, full_content, imported_at
          FROM wt.artifacts
        `)
            .run(),
        );
        // Merge milestones — worktree may have updated status/planning fields
        merged.milestones = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO milestones (
            id, title, status, depends_on, created_at, completed_at,
            vision, success_criteria, key_risks, proof_strategy,
            verification_contract, verification_integration, verification_operational, verification_uat,
            definition_of_done, requirement_coverage, boundary_map_markdown, vision_meeting_json, product_research_json
          )
          SELECT id, title, status, depends_on, created_at, completed_at,
                 vision, success_criteria, key_risks, proof_strategy,
                 verification_contract, verification_integration, verification_operational, verification_uat,
                 definition_of_done, requirement_coverage, boundary_map_markdown, vision_meeting_json, ${hasProductResearch ? "product_research_json" : "''"}
          FROM wt.milestones
        `)
            .run(),
        );
        // Merge slices — preserve worktree progress but never downgrade completed status (#2558).
        // Uses INSERT OR REPLACE with a subquery that picks the best status — if the main DB
        // already has a completed slice, keep that status even if the worktree copy is stale.
        merged.slices = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO slices (
            milestone_id, id, title, status, risk, depends, demo, created_at, completed_at,
            full_summary_md, full_uat_md, goal, success_criteria, proof_level,
            integration_closure, observability_impact, adversarial_partner, adversarial_combatant,
            adversarial_architect, planning_meeting_json, sequence, replan_triggered_at
          )
          SELECT w.milestone_id, w.id, w.title,
                 CASE
                   WHEN m.status IN ('complete', 'done') AND w.status NOT IN ('complete', 'done')
                   THEN m.status ELSE w.status
                 END,
                 w.risk, w.depends, w.demo, w.created_at,
                 CASE
                   WHEN m.status IN ('complete', 'done') AND w.status NOT IN ('complete', 'done')
                   THEN m.completed_at ELSE w.completed_at
                 END,
                 w.full_summary_md, w.full_uat_md, w.goal, w.success_criteria, w.proof_level,
                 w.integration_closure, w.observability_impact, w.adversarial_partner, w.adversarial_combatant,
                 w.adversarial_architect, w.planning_meeting_json, w.sequence, w.replan_triggered_at
          FROM wt.slices w
          LEFT JOIN slices m ON m.milestone_id = w.milestone_id AND m.id = w.id
        `)
            .run(),
        );
        // Merge tasks — preserve execution results, never downgrade completed status (#2558)
        merged.tasks = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO tasks (
            milestone_id, slice_id, id, title, status, one_liner, narrative,
            verification_result, duration, completed_at, blocker_discovered,
            deviations, known_issues, key_files, key_decisions, full_summary_md,
            description, estimate, files, verify, inputs, expected_output,
            observability_impact, full_plan_md, sequence
          )
          SELECT w.milestone_id, w.slice_id, w.id, w.title,
                 CASE
                   WHEN m.status IN ('complete', 'done') AND w.status NOT IN ('complete', 'done')
                   THEN m.status ELSE w.status
                 END,
                 w.one_liner, w.narrative,
                 w.verification_result, w.duration,
                 CASE
                   WHEN m.status IN ('complete', 'done') AND w.status NOT IN ('complete', 'done')
                   THEN m.completed_at ELSE w.completed_at
                 END,
                 w.blocker_discovered,
                 w.deviations, w.known_issues, w.key_files, w.key_decisions, w.full_summary_md,
                 w.description, w.estimate, w.files, w.verify, w.inputs, w.expected_output,
                 w.observability_impact, w.full_plan_md, w.sequence
          FROM wt.tasks w
          LEFT JOIN tasks m ON m.milestone_id = w.milestone_id AND m.slice_id = w.slice_id AND m.id = w.id
        `)
            .run(),
        );
        // Merge memories — keep worktree-learned insights
        merged.memories = countChanges(
          adapter
            .prepare(`
          INSERT OR REPLACE INTO memories (
            seq, id, category, content, confidence, source_unit_type, source_unit_id,
            created_at, updated_at, superseded_by, hit_count
          )
          SELECT seq, id, category, content, confidence, source_unit_type, source_unit_id,
                 created_at, updated_at, superseded_by, hit_count
          FROM wt.memories
        `)
            .run(),
        );
        // Merge verification evidence — append-only, use INSERT OR IGNORE to avoid duplicates
        merged.verification_evidence = countChanges(
          adapter
            .prepare(`
          INSERT OR IGNORE INTO verification_evidence (
            task_id, slice_id, milestone_id, command, exit_code, verdict, duration_ms, created_at
          )
          SELECT task_id, slice_id, milestone_id, command, exit_code, verdict, duration_ms, created_at
          FROM wt.verification_evidence
        `)
            .run(),
        );
        adapter.exec("COMMIT");
      } catch (txErr) {
        // Roll back the partial merge; rethrow so the outer catch logs it.
        try {
          adapter.exec("ROLLBACK");
        } catch (e) {
          logWarning("db", `rollback failed: ${e.message}`);
        }
        throw txErr;
      }
      return { ...merged, conflicts };
    } finally {
      // Always detach, even when the merge failed, so the adapter is reusable.
      try {
        adapter.exec("DETACH DATABASE wt");
      } catch (e) {
        logWarning("db", `detach worktree DB failed: ${e.message}`);
      }
    }
  } catch (err) {
    logError("db", "worktree DB reconciliation failed", {
      error: err.message,
    });
    // Report any conflicts found before the failure alongside zero counts.
    return { ...zero, conflicts };
  }
}
|
||||
|
||||
Loading…
Add table
Reference in a new issue