diff --git a/docs/user-docs/troubleshooting.md b/docs/user-docs/troubleshooting.md index a34463538..5eabcddcd 100644 --- a/docs/user-docs/troubleshooting.md +++ b/docs/user-docs/troubleshooting.md @@ -344,7 +344,7 @@ Doctor rebuilds `STATE.md` from plan and roadmap files on disk and fixes detecte ### "SF database is not available" -**Symptoms:** `sf_decision_save` (or its alias `sf_save_decision`), `sf_requirement_update` (or `sf_update_requirement`), or `sf_summary_save` (or `sf_save_summary`) fail with this error. +**Symptoms:** `sf_decision_save`, `sf_requirement_update`, or `sf_summary_save` fail with this error. **Cause:** The SQLite database wasn't initialized. This happens in manual `/sf` sessions (non-auto mode) on versions before v2.29. diff --git a/docs/zh-CN/user-docs/troubleshooting.md b/docs/zh-CN/user-docs/troubleshooting.md index e7478c9b1..f780980c2 100644 --- a/docs/zh-CN/user-docs/troubleshooting.md +++ b/docs/zh-CN/user-docs/troubleshooting.md @@ -361,7 +361,7 @@ Doctor 会从磁盘上的 plan 和 roadmap 文件重建 `STATE.md`,并修复 ### “SF database is not available” -**症状:** `sf_decision_save`(及其别名 `sf_save_decision`)、`sf_requirement_update`(及其别名 `sf_update_requirement`)或 `sf_summary_save`(及其别名 `sf_save_summary`)报这个错误。 +**症状:** `sf_decision_save`、`sf_requirement_update` 或 `sf_summary_save` 报这个错误。 **原因:** SQLite 数据库未初始化。这个问题会出现在 v2.29 之前的手动 `/sf` 会话(非自动模式)中。 diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md index 7f754d5c3..6fb35d37c 100644 --- a/packages/mcp-server/README.md +++ b/packages/mcp-server/README.md @@ -80,27 +80,19 @@ Add to `.cursor/mcp.json`: The workflow MCP surface includes: - `sf_decision_save` -- `sf_save_decision` - `sf_requirement_update` -- `sf_update_requirement` - `sf_requirement_save` -- `sf_save_requirement` - `sf_milestone_generate_id` - `sf_plan_milestone` - `sf_plan_slice` - `sf_plan_task` -- `sf_task_plan` - `sf_replan_slice` -- `sf_slice_replan` - `sf_task_complete` - `sf_slice_complete` - `sf_skip_slice` - 
`sf_validate_milestone` -- `sf_milestone_validate` - `sf_complete_milestone` -- `sf_milestone_complete` - `sf_reassess_roadmap` -- `sf_roadmap_reassess` - `sf_save_gate_result` - `sf_summary_save` - `sf_milestone_status` diff --git a/packages/mcp-server/src/workflow-tools.test.ts b/packages/mcp-server/src/workflow-tools.test.ts index 44969951b..5e512b75f 100644 --- a/packages/mcp-server/src/workflow-tools.test.ts +++ b/packages/mcp-server/src/workflow-tools.test.ts @@ -1,1025 +1,1303 @@ -import { describe, it } from 'vitest'; import assert from "node:assert/strict"; -import { mkdirSync, rmSync, writeFileSync, existsSync } from "node:fs"; -import { join } from "node:path"; -import { tmpdir } from "node:os"; import { randomUUID } from "node:crypto"; +import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { describe, it } from "vitest"; -import { _getAdapter, closeDatabase } from "../../../src/resources/extensions/sf/sf-db.ts"; -import { registerWorkflowTools, WORKFLOW_TOOL_NAMES, _resetWorkflowModuleState } from "./workflow-tools.ts"; +import { + _getAdapter, + closeDatabase, +} from "../../../src/resources/extensions/sf/sf-db.ts"; +import { + _resetWorkflowModuleState, + registerWorkflowTools, + WORKFLOW_TOOL_NAMES, +} from "./workflow-tools.ts"; + +const REMOVED_WORKFLOW_TOOL_NAMES = [ + "sf_complete_task", + "sf_complete_slice", + "sf_generate_milestone_id", + "sf_save_decision", + "sf_update_requirement", + "sf_save_requirement", + "sf_save_summary", + "sf_milestone_plan", + "sf_slice_plan", + "sf_task_plan", + "sf_slice_replan", + "sf_roadmap_reassess", + "sf_milestone_complete", + "sf_milestone_validate", +] as const; function makeTmpBase(): string { - const base = join(tmpdir(), `sf-mcp-workflow-${randomUUID()}`); - mkdirSync(join(base, ".sf"), { recursive: true }); - return base; + const base = join(tmpdir(), `sf-mcp-workflow-${randomUUID()}`); + 
mkdirSync(join(base, ".sf"), { recursive: true }); + return base; } function cleanup(base: string): void { - try { - closeDatabase(); - } catch { - // swallow - } - try { - rmSync(base, { recursive: true, force: true }); - } catch { - // swallow - } + try { + closeDatabase(); + } catch { + // swallow + } + try { + rmSync(base, { recursive: true, force: true }); + } catch { + // swallow + } } function writeWriteGateSnapshot( - base: string, - snapshot: { verifiedDepthMilestones?: string[]; activeQueuePhase?: boolean; pendingGateId?: string | null }, + base: string, + snapshot: { + verifiedDepthMilestones?: string[]; + activeQueuePhase?: boolean; + pendingGateId?: string | null; + }, ): void { - mkdirSync(join(base, ".sf", "runtime"), { recursive: true }); - writeFileSync( - join(base, ".sf", "runtime", "write-gate-state.json"), - JSON.stringify( - { - verifiedDepthMilestones: snapshot.verifiedDepthMilestones ?? [], - activeQueuePhase: snapshot.activeQueuePhase ?? false, - pendingGateId: snapshot.pendingGateId ?? null, - }, - null, - 2, - ), - "utf-8", - ); + mkdirSync(join(base, ".sf", "runtime"), { recursive: true }); + writeFileSync( + join(base, ".sf", "runtime", "write-gate-state.json"), + JSON.stringify( + { + verifiedDepthMilestones: snapshot.verifiedDepthMilestones ?? [], + activeQueuePhase: snapshot.activeQueuePhase ?? false, + pendingGateId: snapshot.pendingGateId ?? 
null, + }, + null, + 2, + ), + "utf-8", + ); } function makeMockServer() { - const tools: Array<{ - name: string; - description: string; - params: Record; - handler: (args: Record) => Promise; - }> = []; - return { - tools, - tool( - name: string, - description: string, - params: Record, - handler: (args: Record) => Promise, - ) { - tools.push({ name, description, params, handler }); - }, - }; + const tools: Array<{ + name: string; + description: string; + params: Record; + handler: (args: Record) => Promise; + }> = []; + return { + tools, + tool( + name: string, + description: string, + params: Record, + handler: (args: Record) => Promise, + ) { + tools.push({ name, description, params, handler }); + }, + }; } function validPlanningMeeting() { - return { - trigger: "MCP workflow test needs a recorded slice-planning decision.", - pm: "Keep this test slice narrow and focused on one workflow path.", - userAdvocate: "Users need the MCP path to preserve planning context.", - customerPanel: "Operators and maintainers both need durable plan artifacts.", - business: "Reliable planning reduces wasted automation runs.", - researcher: "The MCP server delegates to the shared workflow executors.", - deliveryLead: "Use one small task to keep the integration proof bounded.", - partner: "The test covers the DB-backed render path.", - combatant: "Missing meetings would allow silent planning-context loss.", - architect: "Schema and runtime validation should agree on the meeting contract.", - moderator: "Proceed with the focused planning proof.", - recommendedRoute: "planning", - confidenceSummary: "High confidence for this test fixture.", - }; + return { + trigger: "MCP workflow test needs a recorded slice-planning decision.", + pm: "Keep this test slice narrow and focused on one workflow path.", + userAdvocate: "Users need the MCP path to preserve planning context.", + customerPanel: + "Operators and maintainers both need durable plan artifacts.", + business: "Reliable planning 
reduces wasted automation runs.", + researcher: "The MCP server delegates to the shared workflow executors.", + deliveryLead: "Use one small task to keep the integration proof bounded.", + partner: "The test covers the DB-backed render path.", + combatant: "Missing meetings would allow silent planning-context loss.", + architect: + "Schema and runtime validation should agree on the meeting contract.", + moderator: "Proceed with the focused planning proof.", + recommendedRoute: "planning", + confidenceSummary: "High confidence for this test fixture.", + }; } describe("workflow MCP tools", () => { - it("registers the full headless-safe workflow tool surface", () => { - const server = makeMockServer(); - registerWorkflowTools(server as any); + it("registers the full headless-safe workflow tool surface", () => { + const server = makeMockServer(); + registerWorkflowTools(server as any); - assert.equal(server.tools.length, WORKFLOW_TOOL_NAMES.length); - assert.deepEqual(server.tools.map((t) => t.name), [...WORKFLOW_TOOL_NAMES]); - assert.ok(!server.tools.some((t) => t.name === "sf_complete_task")); - assert.ok(!server.tools.some((t) => t.name === "sf_complete_slice")); - assert.ok(!server.tools.some((t) => t.name === "sf_generate_milestone_id")); - }); + assert.equal(server.tools.length, WORKFLOW_TOOL_NAMES.length); + assert.deepEqual( + server.tools.map((t) => t.name), + [...WORKFLOW_TOOL_NAMES], + ); + for (const removedName of REMOVED_WORKFLOW_TOOL_NAMES) { + assert.ok( + !server.tools.some((t) => t.name === removedName), + `${removedName} should not be registered`, + ); + } + }); - it("sf_summary_save writes artifact through the shared executor", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const tool = server.tools.find((t) => t.name === "sf_summary_save"); - assert.ok(tool, "summary tool should be registered"); - const originalCwd = process.cwd(); + it("sf_summary_save writes 
artifact through the shared executor", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const tool = server.tools.find((t) => t.name === "sf_summary_save"); + assert.ok(tool, "summary tool should be registered"); + const originalCwd = process.cwd(); - const result = await tool!.handler({ - projectDir: base, - milestone_id: "M001", - slice_id: "S01", - artifact_type: "SUMMARY", - content: "# Summary\n\nHello", - }); + const result = await tool!.handler({ + projectDir: base, + milestone_id: "M001", + slice_id: "S01", + artifact_type: "SUMMARY", + content: "# Summary\n\nHello", + }); - const text = (result as any).content[0].text as string; - assert.match(text, /Saved SUMMARY artifact/); - assert.equal(process.cwd(), originalCwd, "workflow MCP tools should not mutate process.cwd"); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md")), - "summary file should exist on disk", - ); - } finally { - cleanup(base); - } - }); + const text = (result as any).content[0].text as string; + assert.match(text, /Saved SUMMARY artifact/); + assert.equal( + process.cwd(), + originalCwd, + "workflow MCP tools should not mutate process.cwd", + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M001", + "slices", + "S01", + "S01-SUMMARY.md", + ), + ), + "summary file should exist on disk", + ); + } finally { + cleanup(base); + } + }); - it("rejects workflow tool calls outside the configured project root", async () => { - const base = makeTmpBase(); - const otherBase = makeTmpBase(); - const prevRoot = process.env.SF_WORKFLOW_PROJECT_ROOT; - try { - process.env.SF_WORKFLOW_PROJECT_ROOT = base; - const server = makeMockServer(); - registerWorkflowTools(server as any); - const tool = server.tools.find((t) => t.name === "sf_summary_save"); - assert.ok(tool, "summary tool should be registered"); + it("rejects workflow tool calls outside the 
configured project root", async () => { + const base = makeTmpBase(); + const otherBase = makeTmpBase(); + const prevRoot = process.env.SF_WORKFLOW_PROJECT_ROOT; + try { + process.env.SF_WORKFLOW_PROJECT_ROOT = base; + const server = makeMockServer(); + registerWorkflowTools(server as any); + const tool = server.tools.find((t) => t.name === "sf_summary_save"); + assert.ok(tool, "summary tool should be registered"); - await assert.rejects( - () => - tool!.handler({ - projectDir: otherBase, - milestone_id: "M001", - artifact_type: "SUMMARY", - content: "# Summary", - }), - /configured workflow project root/, - ); - } finally { - if (prevRoot === undefined) { - delete process.env.SF_WORKFLOW_PROJECT_ROOT; - } else { - process.env.SF_WORKFLOW_PROJECT_ROOT = prevRoot; - } - cleanup(base); - cleanup(otherBase); - } - }); + await assert.rejects( + () => + tool!.handler({ + projectDir: otherBase, + milestone_id: "M001", + artifact_type: "SUMMARY", + content: "# Summary", + }), + /configured workflow project root/, + ); + } finally { + if (prevRoot === undefined) { + delete process.env.SF_WORKFLOW_PROJECT_ROOT; + } else { + process.env.SF_WORKFLOW_PROJECT_ROOT = prevRoot; + } + cleanup(base); + cleanup(otherBase); + } + }); - it("rejects non-file executor module URLs", async () => { - const base = makeTmpBase(); - const prevModule = process.env.SF_WORKFLOW_EXECUTORS_MODULE; - const prevRoot = process.env.SF_WORKFLOW_PROJECT_ROOT; - try { - process.env.SF_WORKFLOW_PROJECT_ROOT = base; - process.env.SF_WORKFLOW_EXECUTORS_MODULE = "data:text/javascript,export default {}"; - _resetWorkflowModuleState(); - const server = makeMockServer(); - registerWorkflowTools(server as any); - const tool = server.tools.find((t) => t.name === "sf_summary_save"); - assert.ok(tool, "summary tool should be registered"); + it("rejects non-file executor module URLs", async () => { + const base = makeTmpBase(); + const prevModule = process.env.SF_WORKFLOW_EXECUTORS_MODULE; + const prevRoot = 
process.env.SF_WORKFLOW_PROJECT_ROOT; + try { + process.env.SF_WORKFLOW_PROJECT_ROOT = base; + process.env.SF_WORKFLOW_EXECUTORS_MODULE = + "data:text/javascript,export default {}"; + _resetWorkflowModuleState(); + const server = makeMockServer(); + registerWorkflowTools(server as any); + const tool = server.tools.find((t) => t.name === "sf_summary_save"); + assert.ok(tool, "summary tool should be registered"); - await assert.rejects( - () => - tool!.handler({ - projectDir: base, - milestone_id: "M001", - artifact_type: "SUMMARY", - content: "# Summary", - }), - /only supports file: URLs or filesystem paths/, - ); - } finally { - if (prevModule === undefined) { - delete process.env.SF_WORKFLOW_EXECUTORS_MODULE; - } else { - process.env.SF_WORKFLOW_EXECUTORS_MODULE = prevModule; - } - if (prevRoot === undefined) { - delete process.env.SF_WORKFLOW_PROJECT_ROOT; - } else { - process.env.SF_WORKFLOW_PROJECT_ROOT = prevRoot; - } - _resetWorkflowModuleState(); - cleanup(base); - } - }); + await assert.rejects( + () => + tool!.handler({ + projectDir: base, + milestone_id: "M001", + artifact_type: "SUMMARY", + content: "# Summary", + }), + /only supports file: URLs or filesystem paths/, + ); + } finally { + if (prevModule === undefined) { + delete process.env.SF_WORKFLOW_EXECUTORS_MODULE; + } else { + process.env.SF_WORKFLOW_EXECUTORS_MODULE = prevModule; + } + if (prevRoot === undefined) { + delete process.env.SF_WORKFLOW_PROJECT_ROOT; + } else { + process.env.SF_WORKFLOW_PROJECT_ROOT = prevRoot; + } + _resetWorkflowModuleState(); + cleanup(base); + } + }); - it("blocks workflow mutation tools while a discussion gate is pending", async () => { - const base = makeTmpBase(); - try { - mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); - writeFileSync( - join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), - "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", - ); - writeWriteGateSnapshot(base, { pendingGateId: 
"depth_verification_M001_confirm" }); + it("blocks workflow mutation tools while a discussion gate is pending", async () => { + const base = makeTmpBase(); + try { + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { + recursive: true, + }); + writeFileSync( + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", + ); + writeWriteGateSnapshot(base, { + pendingGateId: "depth_verification_M001_confirm", + }); - const server = makeMockServer(); - registerWorkflowTools(server as any); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - assert.ok(taskTool, "task tool should be registered"); + const server = makeMockServer(); + registerWorkflowTools(server as any); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + assert.ok(taskTool, "task tool should be registered"); - await assert.rejects( - () => - taskTool!.handler({ - projectDir: base, - taskId: "T01", - sliceId: "S01", - milestoneId: "M001", - oneLiner: "Completed task", - narrative: "Did the work", - verification: "npm test", - }), - /Discussion gate .* has not been confirmed/, - ); - } finally { - cleanup(base); - } - }); + await assert.rejects( + () => + taskTool!.handler({ + projectDir: base, + taskId: "T01", + sliceId: "S01", + milestoneId: "M001", + oneLiner: "Completed task", + narrative: "Did the work", + verification: "npm test", + }), + /Discussion gate .* has not been confirmed/, + ); + } finally { + cleanup(base); + } + }); - it("blocks workflow mutation tools during queue mode", async () => { - const base = makeTmpBase(); - try { - mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); - writeFileSync( - join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), - "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", - ); - writeWriteGateSnapshot(base, { activeQueuePhase: true }); + it("blocks workflow mutation tools during queue 
mode", async () => { + const base = makeTmpBase(); + try { + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { + recursive: true, + }); + writeFileSync( + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", + ); + writeWriteGateSnapshot(base, { activeQueuePhase: true }); - const server = makeMockServer(); - registerWorkflowTools(server as any); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - assert.ok(taskTool, "task tool should be registered"); + const server = makeMockServer(); + registerWorkflowTools(server as any); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + assert.ok(taskTool, "task tool should be registered"); - await assert.rejects( - () => - taskTool!.handler({ - projectDir: base, - taskId: "T01", - sliceId: "S01", - milestoneId: "M001", - oneLiner: "Completed task", - narrative: "Did the work", - verification: "npm test", - }), - /planning tool .* not executes work|Cannot sf_task_complete|Unknown tools are not permitted during queue mode/, - ); - } finally { - cleanup(base); - } - }); + await assert.rejects( + () => + taskTool!.handler({ + projectDir: base, + taskId: "T01", + sliceId: "S01", + milestoneId: "M001", + oneLiner: "Completed task", + narrative: "Did the work", + verification: "npm test", + }), + /planning tool .* not executes work|Cannot sf_task_complete|Unknown tools are not permitted during queue mode/, + ); + } finally { + cleanup(base); + } + }); - it("sf_task_complete and sf_milestone_status work end-to-end", async () => { - const base = makeTmpBase(); - try { - mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { recursive: true }); - writeFileSync( - join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), - "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", - ); + it("sf_task_complete and sf_milestone_status work end-to-end", async () => { + const base = 
makeTmpBase(); + try { + mkdirSync(join(base, ".sf", "milestones", "M001", "slices", "S01"), { + recursive: true, + }); + writeFileSync( + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + "# S01\n\n- [ ] **T01: Demo** `est:5m`\n", + ); - const server = makeMockServer(); - registerWorkflowTools(server as any); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - const statusTool = server.tools.find((t) => t.name === "sf_milestone_status"); - assert.ok(taskTool, "task tool should be registered"); - assert.ok(statusTool, "status tool should be registered"); + const server = makeMockServer(); + registerWorkflowTools(server as any); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + const statusTool = server.tools.find( + (t) => t.name === "sf_milestone_status", + ); + assert.ok(taskTool, "task tool should be registered"); + assert.ok(statusTool, "status tool should be registered"); - const taskResult = await taskTool!.handler({ - projectDir: base, - taskId: "T01", - sliceId: "S01", - milestoneId: "M001", - oneLiner: "Completed task", - narrative: "Did the work", - verification: "npm test", - }); + const taskResult = await taskTool!.handler({ + projectDir: base, + taskId: "T01", + sliceId: "S01", + milestoneId: "M001", + oneLiner: "Completed task", + narrative: "Did the work", + verification: "npm test", + }); - assert.match((taskResult as any).content[0].text as string, /Completed task T01/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-SUMMARY.md")), - "task summary should be written to disk", - ); + assert.match( + (taskResult as any).content[0].text as string, + /Completed task T01/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M001", + "slices", + "S01", + "tasks", + "T01-SUMMARY.md", + ), + ), + "task summary should be written to disk", + ); - const statusResult = await statusTool!.handler({ - projectDir: 
base, - milestoneId: "M001", - }); - const parsed = JSON.parse((statusResult as any).content[0].text as string); - assert.equal(parsed.milestoneId, "M001"); - assert.equal(parsed.sliceCount, 1); - assert.equal(parsed.slices[0].id, "S01"); - } finally { - cleanup(base); - } - }); + const statusResult = await statusTool!.handler({ + projectDir: base, + milestoneId: "M001", + }); + const parsed = JSON.parse( + (statusResult as any).content[0].text as string, + ); + assert.equal(parsed.milestoneId, "M001"); + assert.equal(parsed.sliceCount, 1); + assert.equal(parsed.slices[0].id, "S01"); + } finally { + cleanup(base); + } + }); - it("sf_plan_milestone and sf_plan_slice work end-to-end", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); + it("sf_plan_milestone and sf_plan_slice work end-to-end", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); - const milestoneResult = await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M001", - title: "Workflow MCP planning", - vision: "Plan milestone over MCP.", - slices: [ - { - sliceId: "S01", - title: "Bridge planning", - risk: "medium", - depends: [], - demo: "Milestone plan persists through MCP.", - goal: "Persist roadmap state.", - successCriteria: "ROADMAP.md 
renders from DB.", - proofLevel: "integration", - integrationClosure: "Prompts and MCP call the same handler.", - observabilityImpact: "Executor tests cover output paths.", - }, - ], - }); - assert.match((milestoneResult as any).content[0].text as string, /Planned milestone M001/); + const milestoneResult = await milestoneTool!.handler({ + projectDir: base, + milestoneId: "M001", + title: "Workflow MCP planning", + vision: "Plan milestone over MCP.", + slices: [ + { + sliceId: "S01", + title: "Bridge planning", + risk: "medium", + depends: [], + demo: "Milestone plan persists through MCP.", + goal: "Persist roadmap state.", + successCriteria: "ROADMAP.md renders from DB.", + proofLevel: "integration", + integrationClosure: "Prompts and MCP call the same handler.", + observabilityImpact: "Executor tests cover output paths.", + }, + ], + }); + assert.match( + (milestoneResult as any).content[0].text as string, + /Planned milestone M001/, + ); - const sliceResult = await sliceTool!.handler({ - projectDir: base, - milestoneId: "M001", - sliceId: "S01", - goal: "Persist slice plan over MCP.", - planningMeeting: validPlanningMeeting(), - tasks: [ - { - taskId: "T01", - title: "Add planning bridge", - description: "Implement the shared executor path.", - estimate: "15m", - files: ["src/resources/extensions/sf/tools/workflow-tool-executors.ts"], - verify: "node --test", - inputs: ["ROADMAP.md"], - expectedOutput: ["S01-PLAN.md", "T01-PLAN.md"], - }, - ], - }); - assert.match((sliceResult as any).content[0].text as string, /Planned slice S01/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md")), - "slice plan should exist on disk", - ); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks", "T01-PLAN.md")), - "task plan should exist on disk", - ); - } finally { - cleanup(base); - } - }); + const sliceResult = await sliceTool!.handler({ + projectDir: base, + milestoneId: "M001", + sliceId: 
"S01", + goal: "Persist slice plan over MCP.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T01", + title: "Add planning bridge", + description: "Implement the shared executor path.", + estimate: "15m", + files: [ + "src/resources/extensions/sf/tools/workflow-tool-executors.ts", + ], + verify: "node --test", + inputs: ["ROADMAP.md"], + expectedOutput: ["S01-PLAN.md", "T01-PLAN.md"], + }, + ], + }); + assert.match( + (sliceResult as any).content[0].text as string, + /Planned slice S01/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M001", + "slices", + "S01", + "S01-PLAN.md", + ), + ), + "slice plan should exist on disk", + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M001", + "slices", + "S01", + "tasks", + "T01-PLAN.md", + ), + ), + "task plan should exist on disk", + ); + } finally { + cleanup(base); + } + }); - it("sf_requirement_save opens the DB before inline requirement writes", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const requirementTool = server.tools.find((t) => t.name === "sf_requirement_save"); - assert.ok(requirementTool, "requirement tool should be registered"); + it("sf_requirement_save opens the DB before inline requirement writes", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const requirementTool = server.tools.find( + (t) => t.name === "sf_requirement_save", + ); + assert.ok(requirementTool, "requirement tool should be registered"); - closeDatabase(); + closeDatabase(); - const result = await requirementTool!.handler({ - projectDir: base, - class: "operability", - description: "Inline MCP requirement save regression", - why: "Reproduce missing ensureDbOpen in workflow-tools", - source: "user", - status: "active", - primary_owner: "M010/S10", - validation: "n/a", - }); + const result = await 
requirementTool!.handler({ + projectDir: base, + class: "operability", + description: "Inline MCP requirement save regression", + why: "Reproduce missing ensureDbOpen in workflow-tools", + source: "user", + status: "active", + primary_owner: "M010/S10", + validation: "n/a", + }); - assert.match((result as any).content[0].text as string, /Saved requirement R\d+/); - assert.ok(existsSync(join(base, ".sf", "REQUIREMENTS.md")), "REQUIREMENTS.md should be written to disk"); - const row = _getAdapter()! - .prepare("SELECT id, class, description FROM requirements WHERE description = ?") - .get("Inline MCP requirement save regression") as Record | undefined; - assert.ok(row, "requirement should be written to the database"); - assert.equal(row["class"], "operability"); - } finally { - cleanup(base); - } - }); + assert.match( + (result as any).content[0].text as string, + /Saved requirement R\d+/, + ); + assert.ok( + existsSync(join(base, ".sf", "REQUIREMENTS.md")), + "REQUIREMENTS.md should be written to disk", + ); + const row = _getAdapter()! 
+ .prepare( + "SELECT id, class, description FROM requirements WHERE description = ?", + ) + .get("Inline MCP requirement save regression") as + | Record + | undefined; + assert.ok(row, "requirement should be written to the database"); + assert.equal(row["class"], "operability"); + } finally { + cleanup(base); + } + }); - it("sf_plan_task reopens the DB before inline task planning writes", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - const taskTool = server.tools.find((t) => t.name === "sf_plan_task"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); - assert.ok(taskTool, "task planning tool should be registered"); + it("sf_plan_task reopens the DB before inline task planning writes", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + const taskTool = server.tools.find((t) => t.name === "sf_plan_task"); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); + assert.ok(taskTool, "task planning tool should be registered"); - await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M010", - title: "Inline task planning DB reopen", - vision: "Seed a slice, close the DB, then plan another task inline.", - slices: [ - { - sliceId: "S10", - title: "Inline task planning", - risk: "medium", - depends: [], - demo: "Inline sf_plan_task reopens the DB after it was closed.", - goal: "Preserve MCP task planning 
after the DB adapter is closed.", - successCriteria: "The second task plan persists after a closed DB is reopened.", - proofLevel: "integration", - integrationClosure: "The inline MCP handler reopens the DB before planning.", - observabilityImpact: "workflow-tools MCP tests cover the inline reopen path.", - }, - ], - }); - await sliceTool!.handler({ - projectDir: base, - milestoneId: "M010", - sliceId: "S10", - goal: "Create the initial slice plan before closing the DB.", - planningMeeting: validPlanningMeeting(), - tasks: [ - { - taskId: "T10", - title: "Seed existing task", - description: "Create the initial task plan before closing the DB.", - estimate: "5m", - files: ["packages/mcp-server/src/workflow-tools.ts"], - verify: "node --test", - inputs: ["M010-ROADMAP.md"], - expectedOutput: ["T10-PLAN.md"], - }, - ], - }); + await milestoneTool!.handler({ + projectDir: base, + milestoneId: "M010", + title: "Inline task planning DB reopen", + vision: "Seed a slice, close the DB, then plan another task inline.", + slices: [ + { + sliceId: "S10", + title: "Inline task planning", + risk: "medium", + depends: [], + demo: "Inline sf_plan_task reopens the DB after it was closed.", + goal: "Preserve MCP task planning after the DB adapter is closed.", + successCriteria: + "The second task plan persists after a closed DB is reopened.", + proofLevel: "integration", + integrationClosure: + "The inline MCP handler reopens the DB before planning.", + observabilityImpact: + "workflow-tools MCP tests cover the inline reopen path.", + }, + ], + }); + await sliceTool!.handler({ + projectDir: base, + milestoneId: "M010", + sliceId: "S10", + goal: "Create the initial slice plan before closing the DB.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T10", + title: "Seed existing task", + description: "Create the initial task plan before closing the DB.", + estimate: "5m", + files: ["packages/mcp-server/src/workflow-tools.ts"], + verify: "node --test", + inputs: 
["M010-ROADMAP.md"], + expectedOutput: ["T10-PLAN.md"], + }, + ], + }); - closeDatabase(); + closeDatabase(); - const result = await taskTool!.handler({ - projectDir: base, - milestoneId: "M010", - sliceId: "S10", - taskId: "T11", - title: "Reopen and plan", - description: "Exercise the inline plan-task path after the DB was closed.", - estimate: "5m", - files: ["packages/mcp-server/src/workflow-tools.ts"], - verify: "node --test", - inputs: ["M010-ROADMAP.md", "S10-PLAN.md"], - expectedOutput: ["T11-PLAN.md"], - }); + const result = await taskTool!.handler({ + projectDir: base, + milestoneId: "M010", + sliceId: "S10", + taskId: "T11", + title: "Reopen and plan", + description: + "Exercise the inline plan-task path after the DB was closed.", + estimate: "5m", + files: ["packages/mcp-server/src/workflow-tools.ts"], + verify: "node --test", + inputs: ["M010-ROADMAP.md", "S10-PLAN.md"], + expectedOutput: ["T11-PLAN.md"], + }); - assert.match((result as any).content[0].text as string, /Planned task T11/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M010", "slices", "S10", "tasks", "T11-PLAN.md")), - "T11 plan should be written after reopening the DB", - ); - } finally { - cleanup(base); - } - }); + assert.match( + (result as any).content[0].text as string, + /Planned task T11/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M010", + "slices", + "S10", + "tasks", + "T11-PLAN.md", + ), + ), + "T11 plan should be written after reopening the DB", + ); + } finally { + cleanup(base); + } + }); - it("sf_replan_slice and sf_slice_replan work end-to-end", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - const canonicalTool = 
server.tools.find((t) => t.name === "sf_replan_slice"); - const aliasTool = server.tools.find((t) => t.name === "sf_slice_replan"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); - assert.ok(taskTool, "task completion tool should be registered"); - assert.ok(canonicalTool, "slice replanning tool should be registered"); - assert.ok(aliasTool, "slice replanning alias should be registered"); + it("sf_replan_slice works end-to-end", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + const replanTool = server.tools.find((t) => t.name === "sf_replan_slice"); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); + assert.ok(taskTool, "task completion tool should be registered"); + assert.ok(replanTool, "slice replanning tool should be registered"); - await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M099", - title: "Slice replanning", - vision: "Drive replan parity over MCP.", - slices: [ - { - sliceId: "S09", - title: "Replan slice", - risk: "medium", - depends: [], - demo: "Slice replans after a blocker task completes.", - goal: "Prepare replan state.", - successCriteria: "Plan and replan artifacts update over MCP.", - proofLevel: "integration", - integrationClosure: "Replan uses the shared executor path.", - observabilityImpact: "Tests cover replan artifacts.", - }, - ], - }); - await sliceTool!.handler({ - projectDir: base, - milestoneId: "M099", - sliceId: "S09", - goal: "Plan a slice that will be replanned.", - planningMeeting: validPlanningMeeting(), - 
tasks: [ - { - taskId: "T09", - title: "Blocker task", - description: "Finish the blocker-discovery task.", - estimate: "5m", - files: ["src/blocker.ts"], - verify: "node --test", - inputs: ["M099-ROADMAP.md"], - expectedOutput: ["T09-SUMMARY.md"], - }, - { - taskId: "T10", - title: "Pending task", - description: "Original follow-up task.", - estimate: "10m", - files: ["src/pending.ts"], - verify: "node --test", - inputs: ["S09-PLAN.md"], - expectedOutput: ["Updated plan"], - }, - ], - }); - await taskTool!.handler({ - projectDir: base, - milestoneId: "M099", - sliceId: "S09", - taskId: "T09", - oneLiner: "Completed blocker task", - narrative: "Prepared the slice for replanning.", - verification: "node --test", - }); + await milestoneTool!.handler({ + projectDir: base, + milestoneId: "M099", + title: "Slice replanning", + vision: "Drive replan parity over MCP.", + slices: [ + { + sliceId: "S09", + title: "Replan slice", + risk: "medium", + depends: [], + demo: "Slice replans after a blocker task completes.", + goal: "Prepare replan state.", + successCriteria: "Plan and replan artifacts update over MCP.", + proofLevel: "integration", + integrationClosure: "Replan uses the shared executor path.", + observabilityImpact: "Tests cover replan artifacts.", + }, + ], + }); + await sliceTool!.handler({ + projectDir: base, + milestoneId: "M099", + sliceId: "S09", + goal: "Plan a slice that will be replanned.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T09", + title: "Blocker task", + description: "Finish the blocker-discovery task.", + estimate: "5m", + files: ["src/blocker.ts"], + verify: "node --test", + inputs: ["M099-ROADMAP.md"], + expectedOutput: ["T09-SUMMARY.md"], + }, + { + taskId: "T10", + title: "Pending task", + description: "Original follow-up task.", + estimate: "10m", + files: ["src/pending.ts"], + verify: "node --test", + inputs: ["S09-PLAN.md"], + expectedOutput: ["Updated plan"], + }, + ], + }); + await taskTool!.handler({ + 
projectDir: base, + milestoneId: "M099", + sliceId: "S09", + taskId: "T09", + oneLiner: "Completed blocker task", + narrative: "Prepared the slice for replanning.", + verification: "node --test", + }); - const canonicalResult = await canonicalTool!.handler({ - projectDir: base, - milestoneId: "M099", - sliceId: "S09", - blockerTaskId: "T09", - blockerDescription: "Original approach is no longer viable.", - whatChanged: "Updated the remaining task and added remediation work.", - updatedTasks: [ - { - taskId: "T10", - title: "Pending task (updated)", - description: "Updated follow-up task after replanning.", - estimate: "15m", - files: ["src/pending.ts", "src/replanned.ts"], - verify: "node --test", - inputs: ["S09-PLAN.md"], - expectedOutput: ["Updated plan"], - }, - { - taskId: "T11", - title: "Remediation task", - description: "New task introduced by the replan.", - estimate: "20m", - files: ["src/remediation.ts"], - verify: "node --test", - inputs: ["S09-REPLAN.md"], - expectedOutput: ["Remediation patch"], - }, - ], - removedTaskIds: [], - }); - assert.match((canonicalResult as any).content[0].text as string, /Replanned slice S09/); + const firstReplanResult = await replanTool!.handler({ + projectDir: base, + milestoneId: "M099", + sliceId: "S09", + blockerTaskId: "T09", + blockerDescription: "Original approach is no longer viable.", + whatChanged: "Updated the remaining task and added remediation work.", + updatedTasks: [ + { + taskId: "T10", + title: "Pending task (updated)", + description: "Updated follow-up task after replanning.", + estimate: "15m", + files: ["src/pending.ts", "src/replanned.ts"], + verify: "node --test", + inputs: ["S09-PLAN.md"], + expectedOutput: ["Updated plan"], + }, + { + taskId: "T11", + title: "Remediation task", + description: "New task introduced by the replan.", + estimate: "20m", + files: ["src/remediation.ts"], + verify: "node --test", + inputs: ["S09-REPLAN.md"], + expectedOutput: ["Remediation patch"], + }, + ], + 
removedTaskIds: [], + }); + assert.match( + (firstReplanResult as any).content[0].text as string, + /Replanned slice S09/, + ); - const aliasResult = await aliasTool!.handler({ - projectDir: base, - milestoneId: "M099", - sliceId: "S09", - blockerTaskId: "T09", - blockerDescription: "Alias path confirms the same replan flow.", - whatChanged: "Removed the remediation task after the alias check.", - updatedTasks: [ - { - taskId: "T10", - title: "Pending task (updated again)", - description: "Alias adjusted the remaining pending task.", - estimate: "12m", - files: ["src/pending.ts"], - verify: "node --test", - inputs: ["S09-PLAN.md"], - expectedOutput: ["Updated plan"], - }, - ], - removedTaskIds: ["T11"], - }); - assert.match((aliasResult as any).content[0].text as string, /Replanned slice S09/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M099", "slices", "S09", "S09-REPLAN.md")), - "replan artifact should exist on disk", - ); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M099", "slices", "S09", "S09-PLAN.md")), - "updated plan should exist on disk", - ); - const removedTask = _getAdapter()!.prepare( - "SELECT id FROM tasks WHERE milestone_id = ? AND slice_id = ? 
AND id = ?", - ).get("M099", "S09", "T11"); - assert.equal(removedTask, undefined, "alias should remove the replanned task"); - } finally { - cleanup(base); - } - }); + const secondReplanResult = await replanTool!.handler({ + projectDir: base, + milestoneId: "M099", + sliceId: "S09", + blockerTaskId: "T09", + blockerDescription: "Follow-up replan confirms the canonical flow.", + whatChanged: "Removed the remediation task after the follow-up replan.", + updatedTasks: [ + { + taskId: "T10", + title: "Pending task (updated again)", + description: + "Follow-up replan adjusted the remaining pending task.", + estimate: "12m", + files: ["src/pending.ts"], + verify: "node --test", + inputs: ["S09-PLAN.md"], + expectedOutput: ["Updated plan"], + }, + ], + removedTaskIds: ["T11"], + }); + assert.match( + (secondReplanResult as any).content[0].text as string, + /Replanned slice S09/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M099", + "slices", + "S09", + "S09-REPLAN.md", + ), + ), + "replan artifact should exist on disk", + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M099", + "slices", + "S09", + "S09-PLAN.md", + ), + ), + "updated plan should exist on disk", + ); + const removedTask = _getAdapter()! + .prepare( + "SELECT id FROM tasks WHERE milestone_id = ? AND slice_id = ? 
AND id = ?", + ) + .get("M099", "S09", "T11"); + assert.equal( + removedTask, + undefined, + "follow-up replan should remove the replanned task", + ); + } finally { + cleanup(base); + } + }); - it("sf_slice_complete works end-to-end", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - const canonicalTool = server.tools.find((t) => t.name === "sf_slice_complete"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); - assert.ok(taskTool, "task completion tool should be registered"); - assert.ok(canonicalTool, "slice completion tool should be registered"); + it("sf_slice_complete works end-to-end", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + const canonicalTool = server.tools.find( + (t) => t.name === "sf_slice_complete", + ); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); + assert.ok(taskTool, "task completion tool should be registered"); + assert.ok(canonicalTool, "slice completion tool should be registered"); - await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M003", - title: "Demo milestone", - vision: "Prepare canonical slice completion state.", - slices: [ - { - sliceId: "S03", - title: "Demo Slice", - risk: "medium", - depends: [], - 
demo: "Canonical slice completes through MCP.", - goal: "Seed workflow state.", - successCriteria: "Slice summary and UAT files are written.", - proofLevel: "integration", - integrationClosure: "Planning and completion share the MCP bridge.", - observabilityImpact: "Workflow tests cover canonical completion.", - }, - ], - }); - await sliceTool!.handler({ - projectDir: base, - milestoneId: "M003", - sliceId: "S03", - goal: "Complete canonical slice over MCP.", - planningMeeting: validPlanningMeeting(), - tasks: [ - { - taskId: "T03", - title: "Canonical task", - description: "Seed a completed task for slice completion.", - estimate: "5m", - files: ["packages/mcp-server/src/workflow-tools.ts"], - verify: "node --test", - inputs: ["M003-ROADMAP.md"], - expectedOutput: ["S03-SUMMARY.md", "S03-UAT.md"], - }, - ], - }); - await taskTool!.handler({ - projectDir: base, - milestoneId: "M003", - sliceId: "S03", - taskId: "T03", - oneLiner: "Completed canonical task", - narrative: "Prepared the canonical slice for completion.", - verification: "node --test", - }); + await milestoneTool!.handler({ + projectDir: base, + milestoneId: "M003", + title: "Demo milestone", + vision: "Prepare canonical slice completion state.", + slices: [ + { + sliceId: "S03", + title: "Demo Slice", + risk: "medium", + depends: [], + demo: "Canonical slice completes through MCP.", + goal: "Seed workflow state.", + successCriteria: "Slice summary and UAT files are written.", + proofLevel: "integration", + integrationClosure: "Planning and completion share the MCP bridge.", + observabilityImpact: "Workflow tests cover canonical completion.", + }, + ], + }); + await sliceTool!.handler({ + projectDir: base, + milestoneId: "M003", + sliceId: "S03", + goal: "Complete canonical slice over MCP.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T03", + title: "Canonical task", + description: "Seed a completed task for slice completion.", + estimate: "5m", + files: 
["packages/mcp-server/src/workflow-tools.ts"], + verify: "node --test", + inputs: ["M003-ROADMAP.md"], + expectedOutput: ["S03-SUMMARY.md", "S03-UAT.md"], + }, + ], + }); + await taskTool!.handler({ + projectDir: base, + milestoneId: "M003", + sliceId: "S03", + taskId: "T03", + oneLiner: "Completed canonical task", + narrative: "Prepared the canonical slice for completion.", + verification: "node --test", + }); - const canonicalResult = await canonicalTool!.handler({ - projectDir: base, - milestoneId: "M003", - sliceId: "S03", - sliceTitle: "Demo Slice", - oneLiner: "Completed canonical slice", - narrative: "Did the slice work", - verification: "npm test", - uatContent: "## UAT\n\nPASS", - }); - assert.match((canonicalResult as any).content[0].text as string, /Completed slice S03/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M003", "slices", "S03", "S03-SUMMARY.md")), - "canonical tool should write slice summary to disk", - ); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M003", "slices", "S03", "S03-UAT.md")), - "canonical tool should write slice UAT to disk", - ); - } finally { - cleanup(base); - } - }); + const canonicalResult = await canonicalTool!.handler({ + projectDir: base, + milestoneId: "M003", + sliceId: "S03", + sliceTitle: "Demo Slice", + oneLiner: "Completed canonical slice", + narrative: "Did the slice work", + verification: "npm test", + uatContent: "## UAT\n\nPASS", + }); + assert.match( + (canonicalResult as any).content[0].text as string, + /Completed slice S03/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M003", + "slices", + "S03", + "S03-SUMMARY.md", + ), + ), + "canonical tool should write slice summary to disk", + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M003", + "slices", + "S03", + "S03-UAT.md", + ), + ), + "canonical tool should write slice UAT to disk", + ); + } finally { + cleanup(base); + } + }); - it("sf_validate_milestone and 
sf_milestone_complete work end-to-end", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - const completeSliceTool = server.tools.find((t) => t.name === "sf_slice_complete"); - const validateTool = server.tools.find((t) => t.name === "sf_validate_milestone"); - const completeMilestoneAlias = server.tools.find((t) => t.name === "sf_milestone_complete"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); - assert.ok(taskTool, "task completion tool should be registered"); - assert.ok(completeSliceTool, "slice completion tool should be registered"); - assert.ok(validateTool, "milestone validation tool should be registered"); - assert.ok(completeMilestoneAlias, "milestone completion alias should be registered"); + it("sf_validate_milestone and sf_complete_milestone work end-to-end", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + const completeSliceTool = server.tools.find( + (t) => t.name === "sf_slice_complete", + ); + const validateTool = server.tools.find( + (t) => t.name === "sf_validate_milestone", + ); + const completeMilestoneTool = server.tools.find( + (t) => t.name === "sf_complete_milestone", + ); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); + 
assert.ok(taskTool, "task completion tool should be registered"); + assert.ok( + completeSliceTool, + "slice completion tool should be registered", + ); + assert.ok(validateTool, "milestone validation tool should be registered"); + assert.ok( + completeMilestoneTool, + "milestone completion tool should be registered", + ); - await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M005", - title: "Milestone lifecycle", - vision: "Drive validation and completion over MCP.", - slices: [ - { - sliceId: "S05", - title: "Lifecycle slice", - risk: "medium", - depends: [], - demo: "Milestone can validate and complete.", - goal: "Seed milestone completion state.", - successCriteria: "Summary and validation artifacts are written.", - proofLevel: "integration", - integrationClosure: "Lifecycle tools share the MCP bridge.", - observabilityImpact: "Tests cover milestone end-to-end behavior.", - }, - ], - }); - await sliceTool!.handler({ - projectDir: base, - milestoneId: "M005", - sliceId: "S05", - goal: "Prepare a complete milestone.", - planningMeeting: validPlanningMeeting(), - tasks: [ - { - taskId: "T05", - title: "Lifecycle task", - description: "Seed a fully completed slice.", - estimate: "10m", - files: ["packages/mcp-server/src/workflow-tools.ts"], - verify: "node --test", - inputs: ["M005-ROADMAP.md"], - expectedOutput: ["M005-VALIDATION.md", "M005-SUMMARY.md"], - }, - ], - }); - await taskTool!.handler({ - projectDir: base, - milestoneId: "M005", - sliceId: "S05", - taskId: "T05", - oneLiner: "Completed lifecycle task", - narrative: "Prepared the milestone for closure.", - verification: "node --test", - }); - await completeSliceTool!.handler({ - projectDir: base, - milestoneId: "M005", - sliceId: "S05", - sliceTitle: "Lifecycle Slice", - oneLiner: "Completed lifecycle slice", - narrative: "Closed the milestone slice.", - verification: "node --test", - uatContent: "## UAT\n\nPASS", - }); + await milestoneTool!.handler({ + projectDir: base, + milestoneId: 
"M005", + title: "Milestone lifecycle", + vision: "Drive validation and completion over MCP.", + slices: [ + { + sliceId: "S05", + title: "Lifecycle slice", + risk: "medium", + depends: [], + demo: "Milestone can validate and complete.", + goal: "Seed milestone completion state.", + successCriteria: "Summary and validation artifacts are written.", + proofLevel: "integration", + integrationClosure: "Lifecycle tools share the MCP bridge.", + observabilityImpact: "Tests cover milestone end-to-end behavior.", + }, + ], + }); + await sliceTool!.handler({ + projectDir: base, + milestoneId: "M005", + sliceId: "S05", + goal: "Prepare a complete milestone.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T05", + title: "Lifecycle task", + description: "Seed a fully completed slice.", + estimate: "10m", + files: ["packages/mcp-server/src/workflow-tools.ts"], + verify: "node --test", + inputs: ["M005-ROADMAP.md"], + expectedOutput: ["M005-VALIDATION.md", "M005-SUMMARY.md"], + }, + ], + }); + await taskTool!.handler({ + projectDir: base, + milestoneId: "M005", + sliceId: "S05", + taskId: "T05", + oneLiner: "Completed lifecycle task", + narrative: "Prepared the milestone for closure.", + verification: "node --test", + }); + await completeSliceTool!.handler({ + projectDir: base, + milestoneId: "M005", + sliceId: "S05", + sliceTitle: "Lifecycle Slice", + oneLiner: "Completed lifecycle slice", + narrative: "Closed the milestone slice.", + verification: "node --test", + uatContent: "## UAT\n\nPASS", + }); - const validationResult = await validateTool!.handler({ - projectDir: base, - milestoneId: "M005", - verdict: "pass", - remediationRound: 0, - successCriteriaChecklist: "- [x] Lifecycle verified", - sliceDeliveryAudit: "| Slice | Verdict |\n| --- | --- |\n| S05 | pass |", - crossSliceIntegration: "No cross-slice mismatches found.", - requirementCoverage: "No requirement gaps remain.", - verdictRationale: "The milestone delivered its scope.", - }); - 
assert.match((validationResult as any).content[0].text as string, /Validated milestone M005/); + const validationResult = await validateTool!.handler({ + projectDir: base, + milestoneId: "M005", + verdict: "pass", + remediationRound: 0, + successCriteriaChecklist: "- [x] Lifecycle verified", + sliceDeliveryAudit: + "| Slice | Verdict |\n| --- | --- |\n| S05 | pass |", + crossSliceIntegration: "No cross-slice mismatches found.", + requirementCoverage: "No requirement gaps remain.", + verdictRationale: "The milestone delivered its scope.", + }); + assert.match( + (validationResult as any).content[0].text as string, + /Validated milestone M005/, + ); - const completionResult = await completeMilestoneAlias!.handler({ - projectDir: base, - milestoneId: "M005", - title: "Milestone lifecycle", - oneLiner: "Milestone closed successfully", - narrative: "Validation passed and all slices were complete.", - verificationPassed: true, - }); - assert.match((completionResult as any).content[0].text as string, /Completed milestone M005/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M005", "M005-VALIDATION.md")), - "validation artifact should exist on disk", - ); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M005", "M005-SUMMARY.md")), - "milestone summary should exist on disk", - ); - } finally { - cleanup(base); - } - }); + const completionResult = await completeMilestoneTool!.handler({ + projectDir: base, + milestoneId: "M005", + title: "Milestone lifecycle", + oneLiner: "Milestone closed successfully", + narrative: "Validation passed and all slices were complete.", + verificationPassed: true, + }); + assert.match( + (completionResult as any).content[0].text as string, + /Completed milestone M005/, + ); + assert.ok( + existsSync( + join(base, ".sf", "milestones", "M005", "M005-VALIDATION.md"), + ), + "validation artifact should exist on disk", + ); + assert.ok( + existsSync(join(base, ".sf", "milestones", "M005", "M005-SUMMARY.md")), + "milestone 
summary should exist on disk", + ); + } finally { + cleanup(base); + } + }); - it("sf_reassess_roadmap, sf_roadmap_reassess, and sf_save_gate_result work end-to-end", async () => { - const base = makeTmpBase(); - try { - const server = makeMockServer(); - registerWorkflowTools(server as any); - const milestoneTool = server.tools.find((t) => t.name === "sf_plan_milestone"); - const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); - const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); - const completeSliceTool = server.tools.find((t) => t.name === "sf_slice_complete"); - const reassessTool = server.tools.find((t) => t.name === "sf_reassess_roadmap"); - const reassessAlias = server.tools.find((t) => t.name === "sf_roadmap_reassess"); - const gateTool = server.tools.find((t) => t.name === "sf_save_gate_result"); - assert.ok(milestoneTool, "milestone planning tool should be registered"); - assert.ok(sliceTool, "slice planning tool should be registered"); - assert.ok(taskTool, "task completion tool should be registered"); - assert.ok(completeSliceTool, "slice completion tool should be registered"); - assert.ok(reassessTool, "roadmap reassessment tool should be registered"); - assert.ok(reassessAlias, "roadmap reassessment alias should be registered"); - assert.ok(gateTool, "gate result tool should be registered"); + it("sf_reassess_roadmap and sf_save_gate_result work end-to-end", async () => { + const base = makeTmpBase(); + try { + const server = makeMockServer(); + registerWorkflowTools(server as any); + const milestoneTool = server.tools.find( + (t) => t.name === "sf_plan_milestone", + ); + const sliceTool = server.tools.find((t) => t.name === "sf_plan_slice"); + const taskTool = server.tools.find((t) => t.name === "sf_task_complete"); + const completeSliceTool = server.tools.find( + (t) => t.name === "sf_slice_complete", + ); + const reassessTool = server.tools.find( + (t) => t.name === "sf_reassess_roadmap", + ); + const 
gateTool = server.tools.find( + (t) => t.name === "sf_save_gate_result", + ); + assert.ok(milestoneTool, "milestone planning tool should be registered"); + assert.ok(sliceTool, "slice planning tool should be registered"); + assert.ok(taskTool, "task completion tool should be registered"); + assert.ok( + completeSliceTool, + "slice completion tool should be registered", + ); + assert.ok(reassessTool, "roadmap reassessment tool should be registered"); + assert.ok(gateTool, "gate result tool should be registered"); - await milestoneTool!.handler({ - projectDir: base, - milestoneId: "M006", - title: "Roadmap reassessment", - vision: "Drive gate results and reassessment over MCP.", - slices: [ - { - sliceId: "S06", - title: "Completed slice", - risk: "medium", - depends: [], - demo: "Completed slice triggers reassessment.", - goal: "Seed reassessment state.", - successCriteria: "Assessment and roadmap artifacts are written.", - proofLevel: "integration", - integrationClosure: "Roadmap updates share the MCP bridge.", - observabilityImpact: "Tests cover reassessment behavior.", - }, - { - sliceId: "S07", - title: "Follow-up slice", - risk: "low", - depends: ["S06"], - demo: "Follow-up slice remains pending.", - goal: "Leave room for roadmap edits.", - successCriteria: "Roadmap mutation succeeds.", - proofLevel: "integration", - integrationClosure: "Pending slice can be modified after reassessment.", - observabilityImpact: "Tests observe roadmap mutation output.", - }, - ], - }); - await sliceTool!.handler({ - projectDir: base, - milestoneId: "M006", - sliceId: "S06", - goal: "Complete the first slice.", - planningMeeting: validPlanningMeeting(), - tasks: [ - { - taskId: "T06", - title: "Seed completed slice", - description: "Prepare gate and reassessment state.", - estimate: "10m", - files: ["packages/mcp-server/src/workflow-tools.ts"], - verify: "node --test", - inputs: ["M006-ROADMAP.md"], - expectedOutput: ["S06-ASSESSMENT.md", "M006-ROADMAP.md"], - }, - ], - }); + 
await milestoneTool!.handler({ + projectDir: base, + milestoneId: "M006", + title: "Roadmap reassessment", + vision: "Drive gate results and reassessment over MCP.", + slices: [ + { + sliceId: "S06", + title: "Completed slice", + risk: "medium", + depends: [], + demo: "Completed slice triggers reassessment.", + goal: "Seed reassessment state.", + successCriteria: "Assessment and roadmap artifacts are written.", + proofLevel: "integration", + integrationClosure: "Roadmap updates share the MCP bridge.", + observabilityImpact: "Tests cover reassessment behavior.", + }, + { + sliceId: "S07", + title: "Follow-up slice", + risk: "low", + depends: ["S06"], + demo: "Follow-up slice remains pending.", + goal: "Leave room for roadmap edits.", + successCriteria: "Roadmap mutation succeeds.", + proofLevel: "integration", + integrationClosure: + "Pending slice can be modified after reassessment.", + observabilityImpact: "Tests observe roadmap mutation output.", + }, + ], + }); + await sliceTool!.handler({ + projectDir: base, + milestoneId: "M006", + sliceId: "S06", + goal: "Complete the first slice.", + planningMeeting: validPlanningMeeting(), + tasks: [ + { + taskId: "T06", + title: "Seed completed slice", + description: "Prepare gate and reassessment state.", + estimate: "10m", + files: ["packages/mcp-server/src/workflow-tools.ts"], + verify: "node --test", + inputs: ["M006-ROADMAP.md"], + expectedOutput: ["S06-ASSESSMENT.md", "M006-ROADMAP.md"], + }, + ], + }); - const gateResult = await gateTool!.handler({ - projectDir: base, - milestoneId: "M006", - sliceId: "S06", - gateId: "Q3", - verdict: "pass", - rationale: "Threat surface is covered.", - findings: "No new attack surface was introduced.", - }); - assert.match((gateResult as any).content[0].text as string, /Gate Q3 result saved/); - const gateRows = _getAdapter()!.prepare( - "SELECT status, verdict, rationale FROM quality_gates WHERE milestone_id = ? AND slice_id = ? 
AND gate_id = ?",
- ).all("M006", "S06", "Q3") as Array<Record<string, unknown>>;
- assert.equal(gateRows.length, 1);
- assert.equal(gateRows[0]["status"], "complete");
- assert.equal(gateRows[0]["verdict"], "pass");

+ const gateResult = await gateTool!.handler({
+ projectDir: base,
+ milestoneId: "M006",
+ sliceId: "S06",
+ gateId: "Q3",
+ verdict: "pass",
+ rationale: "Threat surface is covered.",
+ findings: "No new attack surface was introduced.",
+ });
+ assert.match(
+ (gateResult as any).content[0].text as string,
+ /Gate Q3 result saved/,
+ );
+ const gateRows = _getAdapter()!
+ .prepare(
+ "SELECT status, verdict, rationale FROM quality_gates WHERE milestone_id = ? AND slice_id = ? AND gate_id = ?",
+ )
+ .all("M006", "S06", "Q3") as Array<Record<string, unknown>>;
+ assert.equal(gateRows.length, 1);
+ assert.equal(gateRows[0]["status"], "complete");
+ assert.equal(gateRows[0]["verdict"], "pass");

- await taskTool!.handler({
- projectDir: base,
- milestoneId: "M006",
- sliceId: "S06",
- taskId: "T06",
- oneLiner: "Completed reassessment task",
- narrative: "Prepared the slice for reassessment.",
- verification: "node --test",
- });
- await completeSliceTool!.handler({
- projectDir: base,
- milestoneId: "M006",
- sliceId: "S06",
- sliceTitle: "Completed slice",
- oneLiner: "Completed reassessment slice",
- narrative: "Closed the completed slice before reassessment.",
- verification: "node --test",
- uatContent: "## UAT\n\nPASS",
- });

+ await taskTool!.handler({
+ projectDir: base,
+ milestoneId: "M006",
+ sliceId: "S06",
+ taskId: "T06",
+ oneLiner: "Completed reassessment task",
+ narrative: "Prepared the slice for reassessment.",
+ verification: "node --test",
+ });
+ await completeSliceTool!.handler({
+ projectDir: base,
+ milestoneId: "M006",
+ sliceId: "S06",
+ sliceTitle: "Completed slice",
+ oneLiner: "Completed reassessment slice",
+ narrative: "Closed the completed slice before reassessment.",
+ verification: "node --test",
+ uatContent: "## UAT\n\nPASS",
+ });

- const reassessResult = await 
reassessTool!.handler({ - projectDir: base, - milestoneId: "M006", - completedSliceId: "S06", - verdict: "roadmap-adjusted", - assessment: "Insert remediation work after the completed slice.", - sliceChanges: { - modified: [ - { - sliceId: "S07", - title: "Follow-up slice (adjusted)", - risk: "medium", - depends: ["S06"], - demo: "Adjusted demo", - }, - ], - added: [ - { - sliceId: "S08", - title: "Remediation slice", - risk: "high", - depends: ["S07"], - demo: "Remediation demo", - }, - ], - removed: [], - }, - }); - assert.match((reassessResult as any).content[0].text as string, /Reassessed roadmap for milestone M006 after S06/); + const reassessResult = await reassessTool!.handler({ + projectDir: base, + milestoneId: "M006", + completedSliceId: "S06", + verdict: "roadmap-adjusted", + assessment: "Insert remediation work after the completed slice.", + sliceChanges: { + modified: [ + { + sliceId: "S07", + title: "Follow-up slice (adjusted)", + risk: "medium", + depends: ["S06"], + demo: "Adjusted demo", + }, + ], + added: [ + { + sliceId: "S08", + title: "Remediation slice", + risk: "high", + depends: ["S07"], + demo: "Remediation demo", + }, + ], + removed: [], + }, + }); + assert.match( + (reassessResult as any).content[0].text as string, + /Reassessed roadmap for milestone M006 after S06/, + ); - const reassessAliasResult = await reassessAlias!.handler({ - projectDir: base, - milestoneId: "M006", - completedSliceId: "S06", - verdict: "roadmap-confirmed", - assessment: "No further changes needed after the first reassessment.", - sliceChanges: { - modified: [], - added: [], - removed: [], - }, - }); - assert.match((reassessAliasResult as any).content[0].text as string, /Reassessed roadmap for milestone M006 after S06/); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M006", "slices", "S06", "S06-ASSESSMENT.md")), - "assessment artifact should exist on disk", - ); - assert.ok( - existsSync(join(base, ".sf", "milestones", "M006", "M006-ROADMAP.md")), - 
"roadmap artifact should exist on disk", - ); - } finally { - cleanup(base); - } - }); + const secondReassessResult = await reassessTool!.handler({ + projectDir: base, + milestoneId: "M006", + completedSliceId: "S06", + verdict: "roadmap-confirmed", + assessment: "No further changes needed after the first reassessment.", + sliceChanges: { + modified: [], + added: [], + removed: [], + }, + }); + assert.match( + (secondReassessResult as any).content[0].text as string, + /Reassessed roadmap for milestone M006 after S06/, + ); + assert.ok( + existsSync( + join( + base, + ".sf", + "milestones", + "M006", + "slices", + "S06", + "S06-ASSESSMENT.md", + ), + ), + "assessment artifact should exist on disk", + ); + assert.ok( + existsSync(join(base, ".sf", "milestones", "M006", "M006-ROADMAP.md")), + "roadmap artifact should exist on disk", + ); + } finally { + cleanup(base); + } + }); }); describe("URL scheme regex — Windows drive letter safety", () => { - // This is the regex used in getWriteGateModuleCandidates() and - // getWorkflowExecutorModuleCandidates() to reject non-file URL schemes. - // It must NOT match single-letter Windows drive prefixes (C:, D:, etc.). - const urlSchemeRegex = /^[a-z]{2,}:/i; + // This is the regex used in getWriteGateModuleCandidates() and + // getWorkflowExecutorModuleCandidates() to reject non-file URL schemes. + // It must NOT match single-letter Windows drive prefixes (C:, D:, etc.). 
+ const urlSchemeRegex = /^[a-z]{2,}:/i; - it("rejects multi-letter URL schemes", () => { - assert.ok(urlSchemeRegex.test("http://example.com"), "http: should match"); - assert.ok(urlSchemeRegex.test("https://example.com"), "https: should match"); - assert.ok(urlSchemeRegex.test("ftp://files.example.com"), "ftp: should match"); - assert.ok(urlSchemeRegex.test("file:///C:/Users"), "file: should match"); - assert.ok(urlSchemeRegex.test("node:fs"), "node: should match"); - }); + it("rejects multi-letter URL schemes", () => { + assert.ok(urlSchemeRegex.test("http://example.com"), "http: should match"); + assert.ok( + urlSchemeRegex.test("https://example.com"), + "https: should match", + ); + assert.ok( + urlSchemeRegex.test("ftp://files.example.com"), + "ftp: should match", + ); + assert.ok(urlSchemeRegex.test("file:///C:/Users"), "file: should match"); + assert.ok(urlSchemeRegex.test("node:fs"), "node: should match"); + }); - it("allows single-letter Windows drive prefixes", () => { - assert.ok(!urlSchemeRegex.test("C:\\Users\\user\\project"), "C:\\ should not match"); - assert.ok(!urlSchemeRegex.test("D:\\other\\path"), "D:\\ should not match"); - assert.ok(!urlSchemeRegex.test("c:\\lowercase\\drive"), "c:\\ should not match"); - assert.ok(!urlSchemeRegex.test("E:/forward/slash/path"), "E:/ should not match"); - }); + it("allows single-letter Windows drive prefixes", () => { + assert.ok( + !urlSchemeRegex.test("C:\\Users\\user\\project"), + "C:\\ should not match", + ); + assert.ok(!urlSchemeRegex.test("D:\\other\\path"), "D:\\ should not match"); + assert.ok( + !urlSchemeRegex.test("c:\\lowercase\\drive"), + "c:\\ should not match", + ); + assert.ok( + !urlSchemeRegex.test("E:/forward/slash/path"), + "E:/ should not match", + ); + }); - it("allows bare filesystem paths", () => { - assert.ok(!urlSchemeRegex.test("/usr/local/lib/module.js"), "unix absolute path should not match"); - assert.ok(!urlSchemeRegex.test("./relative/path.js"), "relative path should not 
match"); - assert.ok(!urlSchemeRegex.test("../parent/path.js"), "parent relative path should not match"); - }); + it("allows bare filesystem paths", () => { + assert.ok( + !urlSchemeRegex.test("/usr/local/lib/module.js"), + "unix absolute path should not match", + ); + assert.ok( + !urlSchemeRegex.test("./relative/path.js"), + "relative path should not match", + ); + assert.ok( + !urlSchemeRegex.test("../parent/path.js"), + "parent relative path should not match", + ); + }); }); diff --git a/packages/mcp-server/src/workflow-tools.ts b/packages/mcp-server/src/workflow-tools.ts index ede9180e4..6aefdec30 100644 --- a/packages/mcp-server/src/workflow-tools.ts +++ b/packages/mcp-server/src/workflow-tools.ts @@ -7,262 +7,275 @@ import { pathToFileURL } from "node:url"; import { z } from "zod"; type WorkflowToolExecutors = { - SUPPORTED_SUMMARY_ARTIFACT_TYPES: readonly string[]; - executeMilestoneStatus: (params: { milestoneId: string }, basePath?: string) => Promise; - executePlanMilestone: ( - params: { - milestoneId: string; - title: string; - vision: string; - slices?: Array<{ - sliceId: string; - title: string; - risk: string; - depends: string[]; - demo: string; - goal: string; - successCriteria: string; - proofLevel: string; - integrationClosure: string; - observabilityImpact: string; - }>; - templateId?: string; - status?: string; - dependsOn?: string[]; - successCriteria?: string[]; - keyRisks?: Array<{ risk: string; whyItMatters: string }>; - proofStrategy?: Array<{ riskOrUnknown: string; retireIn: string; whatWillBeProven: string }>; - verificationContract?: string; - verificationIntegration?: string; - verificationOperational?: string; - verificationUat?: string; - definitionOfDone?: string[]; - requirementCoverage?: string; - boundaryMapMarkdown?: string; - visionMeeting?: { - trigger: string; - pm: string; - userAdvocate: string; - customerPanel: string; - business: string; - researcher: string; - deliveryLead: string; - partner: string; - combatant: 
string; - architect: string; - moderator: string; - weightedSynthesis: string; - confidenceByArea: string; - recommendedRoute: "discussing" | "researching" | "planning"; - }; - }, - basePath?: string, - ) => Promise; - executePlanSlice: ( - params: { - milestoneId: string; - sliceId: string; - goal: string; - tasks: Array<{ - taskId: string; - title: string; - description: string; - estimate: string; - files: string[]; - verify: string; - inputs: string[]; - expectedOutput: string[]; - observabilityImpact?: string; - }>; - successCriteria?: string; - proofLevel?: string; - integrationClosure?: string; - observabilityImpact?: string; - }, - basePath?: string, - ) => Promise; - executeReplanSlice: ( - params: { - milestoneId: string; - sliceId: string; - blockerTaskId: string; - blockerDescription: string; - whatChanged: string; - updatedTasks: Array<{ - taskId: string; - title: string; - description: string; - estimate: string; - files: string[]; - verify: string; - inputs: string[]; - expectedOutput: string[]; - fullPlanMd?: string; - }>; - removedTaskIds: string[]; - }, - basePath?: string, - ) => Promise; - executeSliceComplete: ( - params: { - sliceId: string; - milestoneId: string; - sliceTitle: string; - oneLiner: string; - narrative: string; - verification: string; - uatContent: string; - deviations?: string; - knownLimitations?: string; - followUps?: string; - keyFiles?: string[] | string; - keyDecisions?: string[] | string; - patternsEstablished?: string[] | string; - observabilitySurfaces?: string[] | string; - provides?: string[] | string; - requirementsSurfaced?: string[] | string; - drillDownPaths?: string[] | string; - affects?: string[] | string; - requirementsAdvanced?: Array<{ id: string; how: string } | string>; - requirementsValidated?: Array<{ id: string; proof: string } | string>; - requirementsInvalidated?: Array<{ id: string; what: string } | string>; - filesModified?: Array<{ path: string; description: string } | string>; - requires?: Array<{ 
slice: string; provides: string } | string>; - }, - basePath?: string, - ) => Promise; - executeCompleteMilestone: ( - params: { - milestoneId: string; - title: string; - oneLiner: string; - narrative: string; - verificationPassed: boolean; - successCriteriaResults?: string; - definitionOfDoneResults?: string; - requirementOutcomes?: string; - keyDecisions?: string[]; - keyFiles?: string[]; - lessonsLearned?: string[]; - followUps?: string; - deviations?: string; - }, - basePath?: string, - ) => Promise; - executeValidateMilestone: ( - params: { - milestoneId: string; - verdict: "pass" | "needs-attention" | "needs-remediation"; - remediationRound: number; - successCriteriaChecklist: string; - sliceDeliveryAudit: string; - crossSliceIntegration: string; - requirementCoverage: string; - verificationClasses?: string; - verdictRationale: string; - remediationPlan?: string; - }, - basePath?: string, - ) => Promise; - executeReassessRoadmap: ( - params: { - milestoneId: string; - completedSliceId: string; - verdict: string; - assessment: string; - sliceChanges: { - modified: Array<{ - sliceId: string; - title: string; - risk?: string; - depends?: string[]; - demo?: string; - }>; - added: Array<{ - sliceId: string; - title: string; - risk?: string; - depends?: string[]; - demo?: string; - }>; - removed: string[]; - }; - }, - basePath?: string, - ) => Promise; - executeSaveGateResult: ( - params: { - milestoneId: string; - sliceId: string; - gateId: string; - taskId?: string; - verdict: "pass" | "flag" | "omitted"; - rationale: string; - findings?: string; - }, - basePath?: string, - ) => Promise; - executeSummarySave: ( - params: { - milestone_id: string; - slice_id?: string; - task_id?: string; - artifact_type: string; - content: string; - }, - basePath?: string, - ) => Promise; - executeTaskComplete: ( - params: { - taskId: string; - sliceId: string; - milestoneId: string; - oneLiner: string; - narrative: string; - verification: string; - deviations?: string; - 
knownIssues?: string; - keyFiles?: string[]; - keyDecisions?: string[]; - blockerDiscovered?: boolean; - verificationEvidence?: Array< - { command: string; exitCode: number; verdict: string; durationMs: number } | string - >; - }, - basePath?: string, - ) => Promise; + SUPPORTED_SUMMARY_ARTIFACT_TYPES: readonly string[]; + executeMilestoneStatus: ( + params: { milestoneId: string }, + basePath?: string, + ) => Promise; + executePlanMilestone: ( + params: { + milestoneId: string; + title: string; + vision: string; + slices?: Array<{ + sliceId: string; + title: string; + risk: string; + depends: string[]; + demo: string; + goal: string; + successCriteria: string; + proofLevel: string; + integrationClosure: string; + observabilityImpact: string; + }>; + templateId?: string; + status?: string; + dependsOn?: string[]; + successCriteria?: string[]; + keyRisks?: Array<{ risk: string; whyItMatters: string }>; + proofStrategy?: Array<{ + riskOrUnknown: string; + retireIn: string; + whatWillBeProven: string; + }>; + verificationContract?: string; + verificationIntegration?: string; + verificationOperational?: string; + verificationUat?: string; + definitionOfDone?: string[]; + requirementCoverage?: string; + boundaryMapMarkdown?: string; + visionMeeting?: { + trigger: string; + pm: string; + userAdvocate: string; + customerPanel: string; + business: string; + researcher: string; + deliveryLead: string; + partner: string; + combatant: string; + architect: string; + moderator: string; + weightedSynthesis: string; + confidenceByArea: string; + recommendedRoute: "discussing" | "researching" | "planning"; + }; + }, + basePath?: string, + ) => Promise; + executePlanSlice: ( + params: { + milestoneId: string; + sliceId: string; + goal: string; + tasks: Array<{ + taskId: string; + title: string; + description: string; + estimate: string; + files: string[]; + verify: string; + inputs: string[]; + expectedOutput: string[]; + observabilityImpact?: string; + }>; + successCriteria?: 
string; + proofLevel?: string; + integrationClosure?: string; + observabilityImpact?: string; + }, + basePath?: string, + ) => Promise; + executeReplanSlice: ( + params: { + milestoneId: string; + sliceId: string; + blockerTaskId: string; + blockerDescription: string; + whatChanged: string; + updatedTasks: Array<{ + taskId: string; + title: string; + description: string; + estimate: string; + files: string[]; + verify: string; + inputs: string[]; + expectedOutput: string[]; + fullPlanMd?: string; + }>; + removedTaskIds: string[]; + }, + basePath?: string, + ) => Promise; + executeSliceComplete: ( + params: { + sliceId: string; + milestoneId: string; + sliceTitle: string; + oneLiner: string; + narrative: string; + verification: string; + uatContent: string; + deviations?: string; + knownLimitations?: string; + followUps?: string; + keyFiles?: string[] | string; + keyDecisions?: string[] | string; + patternsEstablished?: string[] | string; + observabilitySurfaces?: string[] | string; + provides?: string[] | string; + requirementsSurfaced?: string[] | string; + drillDownPaths?: string[] | string; + affects?: string[] | string; + requirementsAdvanced?: Array<{ id: string; how: string } | string>; + requirementsValidated?: Array<{ id: string; proof: string } | string>; + requirementsInvalidated?: Array<{ id: string; what: string } | string>; + filesModified?: Array<{ path: string; description: string } | string>; + requires?: Array<{ slice: string; provides: string } | string>; + }, + basePath?: string, + ) => Promise; + executeCompleteMilestone: ( + params: { + milestoneId: string; + title: string; + oneLiner: string; + narrative: string; + verificationPassed: boolean; + successCriteriaResults?: string; + definitionOfDoneResults?: string; + requirementOutcomes?: string; + keyDecisions?: string[]; + keyFiles?: string[]; + lessonsLearned?: string[]; + followUps?: string; + deviations?: string; + }, + basePath?: string, + ) => Promise; + executeValidateMilestone: ( + 
params: { + milestoneId: string; + verdict: "pass" | "needs-attention" | "needs-remediation"; + remediationRound: number; + successCriteriaChecklist: string; + sliceDeliveryAudit: string; + crossSliceIntegration: string; + requirementCoverage: string; + verificationClasses?: string; + verdictRationale: string; + remediationPlan?: string; + }, + basePath?: string, + ) => Promise; + executeReassessRoadmap: ( + params: { + milestoneId: string; + completedSliceId: string; + verdict: string; + assessment: string; + sliceChanges: { + modified: Array<{ + sliceId: string; + title: string; + risk?: string; + depends?: string[]; + demo?: string; + }>; + added: Array<{ + sliceId: string; + title: string; + risk?: string; + depends?: string[]; + demo?: string; + }>; + removed: string[]; + }; + }, + basePath?: string, + ) => Promise; + executeSaveGateResult: ( + params: { + milestoneId: string; + sliceId: string; + gateId: string; + taskId?: string; + verdict: "pass" | "flag" | "omitted"; + rationale: string; + findings?: string; + }, + basePath?: string, + ) => Promise; + executeSummarySave: ( + params: { + milestone_id: string; + slice_id?: string; + task_id?: string; + artifact_type: string; + content: string; + }, + basePath?: string, + ) => Promise; + executeTaskComplete: ( + params: { + taskId: string; + sliceId: string; + milestoneId: string; + oneLiner: string; + narrative: string; + verification: string; + deviations?: string; + knownIssues?: string; + keyFiles?: string[]; + keyDecisions?: string[]; + blockerDiscovered?: boolean; + verificationEvidence?: Array< + | { + command: string; + exitCode: number; + verdict: string; + durationMs: number; + } + | string + >; + }, + basePath?: string, + ) => Promise; }; type WorkflowWriteGateModule = { - loadWriteGateSnapshot: (basePath?: string) => { - verifiedDepthMilestones: string[]; - activeQueuePhase: boolean; - pendingGateId: string | null; - }; - shouldBlockPendingGateInSnapshot: ( - snapshot: { - verifiedDepthMilestones: 
string[]; - activeQueuePhase: boolean; - pendingGateId: string | null; - }, - toolName: string, - milestoneId: string | null, - queuePhaseActive?: boolean, - ) => { block: boolean; reason?: string }; - shouldBlockQueueExecutionInSnapshot: ( - snapshot: { - verifiedDepthMilestones: string[]; - activeQueuePhase: boolean; - pendingGateId: string | null; - }, - toolName: string, - input: string, - queuePhaseActive?: boolean, - ) => { block: boolean; reason?: string }; + loadWriteGateSnapshot: (basePath?: string) => { + verifiedDepthMilestones: string[]; + activeQueuePhase: boolean; + pendingGateId: string | null; + }; + shouldBlockPendingGateInSnapshot: ( + snapshot: { + verifiedDepthMilestones: string[]; + activeQueuePhase: boolean; + pendingGateId: string | null; + }, + toolName: string, + milestoneId: string | null, + queuePhaseActive?: boolean, + ) => { block: boolean; reason?: string }; + shouldBlockQueueExecutionInSnapshot: ( + snapshot: { + verifiedDepthMilestones: string[]; + activeQueuePhase: boolean; + pendingGateId: string | null; + }, + toolName: string, + input: string, + queuePhaseActive?: boolean, + ) => { block: boolean; reason?: string }; }; type WorkflowDbBootstrapModule = { - ensureDbOpen: (basePath?: string) => Promise; + ensureDbOpen: (basePath?: string) => Promise; }; let workflowToolExecutorsPromise: Promise | null = null; @@ -271,1173 +284,1428 @@ let workflowWriteGatePromise: Promise | null = null; /** Reset module-level singletons so tests can vary env vars between runs. */ export function _resetWorkflowModuleState(): void { - workflowToolExecutorsPromise = null; - workflowExecutionQueue = Promise.resolve(); - workflowWriteGatePromise = null; + workflowToolExecutorsPromise = null; + workflowExecutionQueue = Promise.resolve(); + workflowWriteGatePromise = null; } -function getAllowedProjectRoot(env: NodeJS.ProcessEnv = process.env): string | null { - const configuredRoot = env.SF_WORKFLOW_PROJECT_ROOT?.trim(); - return configuredRoot ? 
resolve(configuredRoot) : null; +function getAllowedProjectRoot( + env: NodeJS.ProcessEnv = process.env, +): string | null { + const configuredRoot = env.SF_WORKFLOW_PROJECT_ROOT?.trim(); + return configuredRoot ? resolve(configuredRoot) : null; } function isWithinRoot(candidatePath: string, rootPath: string): boolean { - const rel = relative(rootPath, candidatePath); - return rel === "" || (!rel.startsWith("..") && !isAbsolute(rel)); + const rel = relative(rootPath, candidatePath); + return rel === "" || (!rel.startsWith("..") && !isAbsolute(rel)); } -function validateProjectDir(projectDir: string, env: NodeJS.ProcessEnv = process.env): string { - if (!isAbsolute(projectDir)) { - throw new Error(`projectDir must be an absolute path. Received: ${projectDir}`); - } +function validateProjectDir( + projectDir: string, + env: NodeJS.ProcessEnv = process.env, +): string { + if (!isAbsolute(projectDir)) { + throw new Error( + `projectDir must be an absolute path. Received: ${projectDir}`, + ); + } - const resolvedProjectDir = resolve(projectDir); - const allowedRoot = getAllowedProjectRoot(env); - if (allowedRoot && !isWithinRoot(resolvedProjectDir, allowedRoot)) { - throw new Error( - `projectDir must stay within the configured workflow project root. Received: ${resolvedProjectDir}; allowed root: ${allowedRoot}`, - ); - } + const resolvedProjectDir = resolve(projectDir); + const allowedRoot = getAllowedProjectRoot(env); + if (allowedRoot && !isWithinRoot(resolvedProjectDir, allowedRoot)) { + throw new Error( + `projectDir must stay within the configured workflow project root. 
Received: ${resolvedProjectDir}; allowed root: ${allowedRoot}`, + ); + } - return resolvedProjectDir; + return resolvedProjectDir; } -function parseToolArgs(schema: z.ZodType, args: Record): T { - return schema.parse(args); +function parseToolArgs( + schema: z.ZodType, + args: Record, +): T { + return schema.parse(args); } function parseWorkflowArgs( - schema: z.ZodType, - args: Record, + schema: z.ZodType, + args: Record, ): T { - const parsed = parseToolArgs(schema, args); - return { - ...parsed, - projectDir: validateProjectDir(parsed.projectDir), - }; + const parsed = parseToolArgs(schema, args); + return { + ...parsed, + projectDir: validateProjectDir(parsed.projectDir), + }; } -function isWorkflowToolExecutors(value: unknown): value is WorkflowToolExecutors { - if (!value || typeof value !== "object") return false; - const record = value as Record; - const functionExports = [ - "executeMilestoneStatus", - "executePlanMilestone", - "executePlanSlice", - "executeReplanSlice", - "executeSliceComplete", - "executeCompleteMilestone", - "executeValidateMilestone", - "executeReassessRoadmap", - "executeSaveGateResult", - "executeSummarySave", - "executeTaskComplete", - ]; +function isWorkflowToolExecutors( + value: unknown, +): value is WorkflowToolExecutors { + if (!value || typeof value !== "object") return false; + const record = value as Record; + const functionExports = [ + "executeMilestoneStatus", + "executePlanMilestone", + "executePlanSlice", + "executeReplanSlice", + "executeSliceComplete", + "executeCompleteMilestone", + "executeValidateMilestone", + "executeReassessRoadmap", + "executeSaveGateResult", + "executeSummarySave", + "executeTaskComplete", + ]; - return Array.isArray(record.SUPPORTED_SUMMARY_ARTIFACT_TYPES) && - functionExports.every((key) => typeof record[key] === "function"); + return ( + Array.isArray(record.SUPPORTED_SUMMARY_ARTIFACT_TYPES) && + functionExports.every((key) => typeof record[key] === "function") + ); } -function 
getSupportedSummaryArtifactTypes(executors: WorkflowToolExecutors): readonly string[] { - return executors.SUPPORTED_SUMMARY_ARTIFACT_TYPES; +function getSupportedSummaryArtifactTypes( + executors: WorkflowToolExecutors, +): readonly string[] { + return executors.SUPPORTED_SUMMARY_ARTIFACT_TYPES; } function getWriteGateModuleCandidates(): string[] { - const candidates: string[] = []; - const explicitModule = process.env.SF_WORKFLOW_WRITE_GATE_MODULE?.trim(); - if (explicitModule) { - if (/^[a-z]{2,}:/i.test(explicitModule) && !explicitModule.startsWith("file:")) { - throw new Error("SF_WORKFLOW_WRITE_GATE_MODULE only supports file: URLs or filesystem paths."); - } - candidates.push(explicitModule.startsWith("file:") ? explicitModule : toFileUrl(explicitModule)); - } + const candidates: string[] = []; + const explicitModule = process.env.SF_WORKFLOW_WRITE_GATE_MODULE?.trim(); + if (explicitModule) { + if ( + /^[a-z]{2,}:/i.test(explicitModule) && + !explicitModule.startsWith("file:") + ) { + throw new Error( + "SF_WORKFLOW_WRITE_GATE_MODULE only supports file: URLs or filesystem paths.", + ); + } + candidates.push( + explicitModule.startsWith("file:") + ? 
explicitModule + : toFileUrl(explicitModule), + ); + } - candidates.push( - new URL("../../../src/resources/extensions/sf/bootstrap/write-gate.js", import.meta.url).href, - new URL("../../../dist/resources/extensions/sf/bootstrap/write-gate.js", import.meta.url).href, - new URL("../../../src/resources/extensions/sf/bootstrap/write-gate.ts", import.meta.url).href, - ); + candidates.push( + new URL( + "../../../src/resources/extensions/sf/bootstrap/write-gate.js", + import.meta.url, + ).href, + new URL( + "../../../dist/resources/extensions/sf/bootstrap/write-gate.js", + import.meta.url, + ).href, + new URL( + "../../../src/resources/extensions/sf/bootstrap/write-gate.ts", + import.meta.url, + ).href, + ); - return [...new Set(candidates)]; + return [...new Set(candidates)]; } function toFileUrl(modulePath: string): string { - return pathToFileURL(resolve(modulePath)).href; + return pathToFileURL(resolve(modulePath)).href; } /** @internal — exported for testing only */ export function _buildImportCandidates(relativePath: string): string[] { - // Build candidate paths: try the given path first, then swap src/<->dist/ - // and try .ts extension. This handles both dev (tsx from src/) and prod - // (compiled from dist/) execution contexts. - const candidates: string[] = [relativePath]; - const swapped = relativePath.includes("/src/") - ? relativePath.replace("/src/", "/dist/") - : relativePath.includes("/dist/") - ? relativePath.replace("/dist/", "/src/") - : null; - if (swapped) candidates.push(swapped); - // Also try .ts variants for dev-mode tsx execution - if (relativePath.endsWith(".js")) { - candidates.push(relativePath.replace(/\.js$/, ".ts")); - if (swapped) candidates.push(swapped.replace(/\.js$/, ".ts")); - } - return candidates; + // Build candidate paths: try the given path first, then swap src/<->dist/ + // and try .ts extension. This handles both dev (tsx from src/) and prod + // (compiled from dist/) execution contexts. 
+ const candidates: string[] = [relativePath]; + const swapped = relativePath.includes("/src/") + ? relativePath.replace("/src/", "/dist/") + : relativePath.includes("/dist/") + ? relativePath.replace("/dist/", "/src/") + : null; + if (swapped) candidates.push(swapped); + // Also try .ts variants for dev-mode tsx execution + if (relativePath.endsWith(".js")) { + candidates.push(relativePath.replace(/\.js$/, ".ts")); + if (swapped) candidates.push(swapped.replace(/\.js$/, ".ts")); + } + return candidates; } async function importLocalModule(relativePath: string): Promise { - const candidates = _buildImportCandidates(relativePath) - .map((p) => new URL(p, import.meta.url).href); + const candidates = _buildImportCandidates(relativePath).map( + (p) => new URL(p, import.meta.url).href, + ); - let lastErr: unknown; - for (const candidate of candidates) { - try { - return await import(candidate) as T; - } catch (err) { - lastErr = err; - } - } - throw lastErr; + let lastErr: unknown; + for (const candidate of candidates) { + try { + return (await import(candidate)) as T; + } catch (err) { + lastErr = err; + } + } + throw lastErr; } -function getWorkflowExecutorModuleCandidates(env: NodeJS.ProcessEnv = process.env): string[] { - const candidates: string[] = []; - const explicitModule = env.SF_WORKFLOW_EXECUTORS_MODULE?.trim(); - if (explicitModule) { - if (/^[a-z]{2,}:/i.test(explicitModule) && !explicitModule.startsWith("file:")) { - throw new Error("SF_WORKFLOW_EXECUTORS_MODULE only supports file: URLs or filesystem paths."); - } - candidates.push(explicitModule.startsWith("file:") ? 
explicitModule : toFileUrl(explicitModule)); - } +function getWorkflowExecutorModuleCandidates( + env: NodeJS.ProcessEnv = process.env, +): string[] { + const candidates: string[] = []; + const explicitModule = env.SF_WORKFLOW_EXECUTORS_MODULE?.trim(); + if (explicitModule) { + if ( + /^[a-z]{2,}:/i.test(explicitModule) && + !explicitModule.startsWith("file:") + ) { + throw new Error( + "SF_WORKFLOW_EXECUTORS_MODULE only supports file: URLs or filesystem paths.", + ); + } + candidates.push( + explicitModule.startsWith("file:") + ? explicitModule + : toFileUrl(explicitModule), + ); + } - candidates.push( - new URL("../../../src/resources/extensions/sf/tools/workflow-tool-executors.js", import.meta.url).href, - new URL("../../../dist/resources/extensions/sf/tools/workflow-tool-executors.js", import.meta.url).href, - new URL("../../../src/resources/extensions/sf/tools/workflow-tool-executors.ts", import.meta.url).href, - ); + candidates.push( + new URL( + "../../../src/resources/extensions/sf/tools/workflow-tool-executors.js", + import.meta.url, + ).href, + new URL( + "../../../dist/resources/extensions/sf/tools/workflow-tool-executors.js", + import.meta.url, + ).href, + new URL( + "../../../src/resources/extensions/sf/tools/workflow-tool-executors.ts", + import.meta.url, + ).href, + ); - return [...new Set(candidates)]; + return [...new Set(candidates)]; } async function getWorkflowToolExecutors(): Promise { - if (!workflowToolExecutorsPromise) { - workflowToolExecutorsPromise = (async () => { - const attempts: string[] = []; - for (const candidate of getWorkflowExecutorModuleCandidates()) { - try { - const loaded = await import(candidate); - if (isWorkflowToolExecutors(loaded)) { - return loaded; - } - attempts.push(`${candidate} (module shape mismatch)`); - } catch (err) { - attempts.push(`${candidate} (${err instanceof Error ? 
err.message : String(err)})`); - } - } + if (!workflowToolExecutorsPromise) { + workflowToolExecutorsPromise = (async () => { + const attempts: string[] = []; + for (const candidate of getWorkflowExecutorModuleCandidates()) { + try { + const loaded = await import(candidate); + if (isWorkflowToolExecutors(loaded)) { + return loaded; + } + attempts.push(`${candidate} (module shape mismatch)`); + } catch (err) { + attempts.push( + `${candidate} (${err instanceof Error ? err.message : String(err)})`, + ); + } + } - throw new Error( - "Unable to load SF workflow executor bridge for MCP mutation tools. " + - "Set SF_WORKFLOW_EXECUTORS_MODULE to an importable workflow-tool-executors module, " + - "or run the MCP server from a SF checkout that includes src/resources/extensions/sf/tools/workflow-tool-executors.(js|ts). " + - `Attempts: ${attempts.join("; ")}`, - ); - })(); - } - return workflowToolExecutorsPromise; + throw new Error( + "Unable to load SF workflow executor bridge for MCP mutation tools. " + + "Set SF_WORKFLOW_EXECUTORS_MODULE to an importable workflow-tool-executors module, " + + "or run the MCP server from a SF checkout that includes src/resources/extensions/sf/tools/workflow-tool-executors.(js|ts). " + + `Attempts: ${attempts.join("; ")}`, + ); + })(); + } + return workflowToolExecutorsPromise; } async function getWorkflowWriteGateModule(): Promise { - if (!workflowWriteGatePromise) { - workflowWriteGatePromise = (async () => { - const attempts: string[] = []; - for (const candidate of getWriteGateModuleCandidates()) { - try { - const loaded = await import(candidate); - if ( - loaded && - typeof loaded.loadWriteGateSnapshot === "function" && - typeof loaded.shouldBlockPendingGateInSnapshot === "function" && - typeof loaded.shouldBlockQueueExecutionInSnapshot === "function" - ) { - return loaded as WorkflowWriteGateModule; - } - attempts.push(`${candidate} (module shape mismatch)`); - } catch (err) { - attempts.push(`${candidate} (${err instanceof Error ? 
err.message : String(err)})`); - } - } + if (!workflowWriteGatePromise) { + workflowWriteGatePromise = (async () => { + const attempts: string[] = []; + for (const candidate of getWriteGateModuleCandidates()) { + try { + const loaded = await import(candidate); + if ( + loaded && + typeof loaded.loadWriteGateSnapshot === "function" && + typeof loaded.shouldBlockPendingGateInSnapshot === "function" && + typeof loaded.shouldBlockQueueExecutionInSnapshot === "function" + ) { + return loaded as WorkflowWriteGateModule; + } + attempts.push(`${candidate} (module shape mismatch)`); + } catch (err) { + attempts.push( + `${candidate} (${err instanceof Error ? err.message : String(err)})`, + ); + } + } - throw new Error( - "Unable to load SF write-gate bridge for workflow MCP tools. " + - `Attempts: ${attempts.join("; ")}`, - ); - })(); - } - return workflowWriteGatePromise; + throw new Error( + "Unable to load SF write-gate bridge for workflow MCP tools. " + + `Attempts: ${attempts.join("; ")}`, + ); + })(); + } + return workflowWriteGatePromise; } interface McpToolServer { - tool( - name: string, - description: string, - params: Record, - handler: (args: Record) => Promise, - ): unknown; + tool( + name: string, + description: string, + params: Record, + handler: (args: Record) => Promise, + ): unknown; } export const WORKFLOW_TOOL_NAMES = [ - "sf_decision_save", - "sf_save_decision", - "sf_requirement_update", - "sf_update_requirement", - "sf_requirement_save", - "sf_save_requirement", - "sf_milestone_generate_id", - "sf_plan_milestone", - "sf_plan_slice", - "sf_plan_task", - "sf_task_plan", - "sf_replan_slice", - "sf_slice_replan", - "sf_slice_complete", - "sf_skip_slice", - "sf_complete_milestone", - "sf_milestone_complete", - "sf_validate_milestone", - "sf_milestone_validate", - "sf_reassess_roadmap", - "sf_roadmap_reassess", - "sf_save_gate_result", - "sf_summary_save", - "sf_task_complete", - "sf_milestone_status", - "sf_journal_query", + "sf_decision_save", + 
"sf_requirement_update", + "sf_requirement_save", + "sf_milestone_generate_id", + "sf_plan_milestone", + "sf_plan_slice", + "sf_plan_task", + "sf_replan_slice", + "sf_slice_complete", + "sf_skip_slice", + "sf_complete_milestone", + "sf_validate_milestone", + "sf_reassess_roadmap", + "sf_save_gate_result", + "sf_summary_save", + "sf_task_complete", + "sf_milestone_status", + "sf_journal_query", ] as const; -async function runSerializedWorkflowOperation(fn: () => Promise): Promise { - // The shared DB adapter and workflow log base path are process-global, so - // workflow MCP mutations must not overlap within a single server process. - const prior = workflowExecutionQueue; - let release!: () => void; - workflowExecutionQueue = new Promise((resolve) => { - release = resolve; - }); +async function runSerializedWorkflowOperation( + fn: () => Promise, +): Promise { + // The shared DB adapter and workflow log base path are process-global, so + // workflow MCP mutations must not overlap within a single server process. 
+ const prior = workflowExecutionQueue; + let release!: () => void; + workflowExecutionQueue = new Promise((resolve) => { + release = resolve; + }); - await prior; - try { - return await fn(); - } finally { - release(); - } + await prior; + try { + return await fn(); + } finally { + release(); + } } async function runSerializedWorkflowDbOperation( - projectDir: string, - fn: () => Promise, + projectDir: string, + fn: () => Promise, ): Promise { - return runSerializedWorkflowOperation(async () => { - const { ensureDbOpen } = await importLocalModule( - "../../../src/resources/extensions/sf/bootstrap/dynamic-tools.js", - ); - const dbAvailable = await ensureDbOpen(projectDir); - if (!dbAvailable) { - throw new Error("SF database is not available"); - } - return fn(); - }); + return runSerializedWorkflowOperation(async () => { + const { ensureDbOpen } = await importLocalModule( + "../../../src/resources/extensions/sf/bootstrap/dynamic-tools.js", + ); + const dbAvailable = await ensureDbOpen(projectDir); + if (!dbAvailable) { + throw new Error("SF database is not available"); + } + return fn(); + }); } async function enforceWorkflowWriteGate( - toolName: string, - projectDir: string, - milestoneId: string | null = null, + toolName: string, + projectDir: string, + milestoneId: string | null = null, ): Promise { - const writeGate = await getWorkflowWriteGateModule(); - const snapshot = writeGate.loadWriteGateSnapshot(projectDir); - const pendingGate = writeGate.shouldBlockPendingGateInSnapshot( - snapshot, - toolName, - milestoneId, - snapshot.activeQueuePhase, - ); - if (pendingGate.block) { - throw new Error(pendingGate.reason ?? 
"workflow tool blocked by pending discussion gate"); - } + const writeGate = await getWorkflowWriteGateModule(); + const snapshot = writeGate.loadWriteGateSnapshot(projectDir); + const pendingGate = writeGate.shouldBlockPendingGateInSnapshot( + snapshot, + toolName, + milestoneId, + snapshot.activeQueuePhase, + ); + if (pendingGate.block) { + throw new Error( + pendingGate.reason ?? "workflow tool blocked by pending discussion gate", + ); + } - const queueGuard = writeGate.shouldBlockQueueExecutionInSnapshot( - snapshot, - toolName, - "", - snapshot.activeQueuePhase, - ); - if (queueGuard.block) { - throw new Error(queueGuard.reason ?? "workflow tool blocked during queue mode"); - } + const queueGuard = writeGate.shouldBlockQueueExecutionInSnapshot( + snapshot, + toolName, + "", + snapshot.activeQueuePhase, + ); + if (queueGuard.block) { + throw new Error( + queueGuard.reason ?? "workflow tool blocked during queue mode", + ); + } } async function handleTaskComplete( - projectDir: string, - args: Omit, "projectDir">, + projectDir: string, + args: Omit, "projectDir">, ): Promise { - await enforceWorkflowWriteGate("sf_task_complete", projectDir, args.milestoneId); - const { - taskId, - sliceId, - milestoneId, - oneLiner, - narrative, - verification, - deviations, - knownIssues, - keyFiles, - keyDecisions, - blockerDiscovered, - verificationEvidence, - } = args; - const { executeTaskComplete } = await getWorkflowToolExecutors(); - return runSerializedWorkflowOperation(() => - executeTaskComplete( - { - taskId, - sliceId, - milestoneId, - oneLiner, - narrative, - verification, - deviations, - knownIssues, - keyFiles, - keyDecisions, - blockerDiscovered, - verificationEvidence, - }, - projectDir, - ), - ); + await enforceWorkflowWriteGate( + "sf_task_complete", + projectDir, + args.milestoneId, + ); + const { + taskId, + sliceId, + milestoneId, + oneLiner, + narrative, + verification, + deviations, + knownIssues, + keyFiles, + keyDecisions, + blockerDiscovered, + 
verificationEvidence, + } = args; + const { executeTaskComplete } = await getWorkflowToolExecutors(); + return runSerializedWorkflowOperation(() => + executeTaskComplete( + { + taskId, + sliceId, + milestoneId, + oneLiner, + narrative, + verification, + deviations, + knownIssues, + keyFiles, + keyDecisions, + blockerDiscovered, + verificationEvidence, + }, + projectDir, + ), + ); } async function handleSliceComplete( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await enforceWorkflowWriteGate("sf_slice_complete", projectDir, args.milestoneId); - const { executeSliceComplete } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeSliceComplete(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_slice_complete", + projectDir, + args.milestoneId, + ); + const { executeSliceComplete } = await getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeSliceComplete(params, projectDir), + ); } async function handleReplanSlice( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await enforceWorkflowWriteGate("sf_replan_slice", projectDir, args.milestoneId); - const { executeReplanSlice } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeReplanSlice(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_replan_slice", + projectDir, + args.milestoneId, + ); + const { executeReplanSlice } = await getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeReplanSlice(params, projectDir), + ); } async function handleCompleteMilestone( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await 
enforceWorkflowWriteGate("sf_complete_milestone", projectDir, args.milestoneId); - const { executeCompleteMilestone } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeCompleteMilestone(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_complete_milestone", + projectDir, + args.milestoneId, + ); + const { executeCompleteMilestone } = await getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeCompleteMilestone(params, projectDir), + ); } async function handleValidateMilestone( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await enforceWorkflowWriteGate("sf_validate_milestone", projectDir, args.milestoneId); - const { executeValidateMilestone } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeValidateMilestone(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_validate_milestone", + projectDir, + args.milestoneId, + ); + const { executeValidateMilestone } = await getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeValidateMilestone(params, projectDir), + ); } async function handleReassessRoadmap( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await enforceWorkflowWriteGate("sf_reassess_roadmap", projectDir, args.milestoneId); - const { executeReassessRoadmap } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeReassessRoadmap(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_reassess_roadmap", + projectDir, + args.milestoneId, + ); + const { executeReassessRoadmap } = await 
getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeReassessRoadmap(params, projectDir), + ); } async function handleSaveGateResult( - projectDir: string, - args: z.infer, + projectDir: string, + args: z.infer, ): Promise { - await enforceWorkflowWriteGate("sf_save_gate_result", projectDir, args.milestoneId); - const { executeSaveGateResult } = await getWorkflowToolExecutors(); - const { projectDir: _projectDir, ...params } = args; - return runSerializedWorkflowOperation(() => executeSaveGateResult(params, projectDir)); + await enforceWorkflowWriteGate( + "sf_save_gate_result", + projectDir, + args.milestoneId, + ); + const { executeSaveGateResult } = await getWorkflowToolExecutors(); + const { projectDir: _projectDir, ...params } = args; + return runSerializedWorkflowOperation(() => + executeSaveGateResult(params, projectDir), + ); } async function ensureMilestoneDbRow(milestoneId: string): Promise { - try { - const { insertMilestone } = await importLocalModule("../../../src/resources/extensions/sf/sf-db.js"); - insertMilestone({ id: milestoneId, status: "queued" }); - } catch { - // Ignore pre-existing rows or transient DB availability issues. - } + try { + const { insertMilestone } = await importLocalModule( + "../../../src/resources/extensions/sf/sf-db.js", + ); + insertMilestone({ id: milestoneId, status: "queued" }); + } catch { + // Ignore pre-existing rows or transient DB availability issues. + } } -const projectDirParam = z.string().describe("Absolute path to the project directory within the configured workflow root"); +const projectDirParam = z + .string() + .describe( + "Absolute path to the project directory within the configured workflow root", + ); const planMilestoneParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), - title: z.string().describe("Milestone title"), - vision: z.string().describe("Milestone vision"), - slices: z.array(z.object({ - sliceId: z.string(), - title: z.string(), - risk: z.string(), - depends: z.array(z.string()), - demo: z.string(), - goal: z.string(), - successCriteria: z.string(), - proofLevel: z.string(), - integrationClosure: z.string(), - observabilityImpact: z.string(), - })).optional().describe("Planned slices for the milestone. Optional when templateId is used for scaffolding."), - templateId: z.string().optional().describe("Optional milestone template scaffold (e.g. bugfix, small-feature, refactor)"), - status: z.string().optional().describe("Milestone status"), - dependsOn: z.array(z.string()).optional().describe("Milestone dependencies"), - successCriteria: z.array(z.string()).optional().describe("Top-level success criteria bullets"), - keyRisks: z.array(z.object({ - risk: z.string(), - whyItMatters: z.string(), - })).optional().describe("Structured risk entries"), - proofStrategy: z.array(z.object({ - riskOrUnknown: z.string(), - retireIn: z.string(), - whatWillBeProven: z.string(), - })).optional().describe("Structured proof strategy entries"), - verificationContract: z.string().optional(), - verificationIntegration: z.string().optional(), - verificationOperational: z.string().optional(), - verificationUat: z.string().optional(), - definitionOfDone: z.array(z.string()).optional(), - requirementCoverage: z.string().optional(), - boundaryMapMarkdown: z.string().optional(), - visionMeeting: z.object({ - trigger: z.string(), - pm: z.string(), - userAdvocate: z.string(), - customerPanel: z.string(), - business: z.string(), - researcher: z.string(), - deliveryLead: z.string(), - partner: z.string(), - combatant: z.string(), - architect: z.string(), - moderator: z.string(), - weightedSynthesis: z.string(), - confidenceByArea: z.string(), - recommendedRoute: z.enum(["discussing", "researching", "planning"]), - 
}).optional().describe("Structured top-level vision and roadmap alignment meeting with weighted synthesis"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. M001)"), + title: z.string().describe("Milestone title"), + vision: z.string().describe("Milestone vision"), + slices: z + .array( + z.object({ + sliceId: z.string(), + title: z.string(), + risk: z.string(), + depends: z.array(z.string()), + demo: z.string(), + goal: z.string(), + successCriteria: z.string(), + proofLevel: z.string(), + integrationClosure: z.string(), + observabilityImpact: z.string(), + }), + ) + .optional() + .describe( + "Planned slices for the milestone. Optional when templateId is used for scaffolding.", + ), + templateId: z + .string() + .optional() + .describe( + "Optional milestone template scaffold (e.g. bugfix, small-feature, refactor)", + ), + status: z.string().optional().describe("Milestone status"), + dependsOn: z.array(z.string()).optional().describe("Milestone dependencies"), + successCriteria: z + .array(z.string()) + .optional() + .describe("Top-level success criteria bullets"), + keyRisks: z + .array( + z.object({ + risk: z.string(), + whyItMatters: z.string(), + }), + ) + .optional() + .describe("Structured risk entries"), + proofStrategy: z + .array( + z.object({ + riskOrUnknown: z.string(), + retireIn: z.string(), + whatWillBeProven: z.string(), + }), + ) + .optional() + .describe("Structured proof strategy entries"), + verificationContract: z.string().optional(), + verificationIntegration: z.string().optional(), + verificationOperational: z.string().optional(), + verificationUat: z.string().optional(), + definitionOfDone: z.array(z.string()).optional(), + requirementCoverage: z.string().optional(), + boundaryMapMarkdown: z.string().optional(), + visionMeeting: z + .object({ + trigger: z.string(), + pm: z.string(), + userAdvocate: z.string(), + customerPanel: z.string(), + business: z.string(), + researcher: z.string(), + deliveryLead: 
z.string(), + partner: z.string(), + combatant: z.string(), + architect: z.string(), + moderator: z.string(), + weightedSynthesis: z.string(), + confidenceByArea: z.string(), + recommendedRoute: z.enum(["discussing", "researching", "planning"]), + }) + .optional() + .describe( + "Structured top-level vision and roadmap alignment meeting with weighted synthesis", + ), }; const planMilestoneSchema = z.object(planMilestoneParams); const planSliceParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - sliceId: z.string().describe("Slice ID (e.g. S01)"), - goal: z.string().describe("Slice goal"), - adversarialReview: z.object({ - partner: z.string(), - combatant: z.string(), - architect: z.string(), - }).optional().describe("Adversarial review summary with partner, combatant, and architect perspectives"), - planningMeeting: z.object({ - trigger: z.string(), - pm: z.string(), - userAdvocate: z.string().optional(), - customerPanel: z.string().optional(), - business: z.string().optional(), - researcher: z.string(), - deliveryLead: z.string().optional(), - partner: z.string(), - combatant: z.string(), - architect: z.string(), - moderator: z.string(), - recommendedRoute: z.enum(["discussing", "researching", "planning"]), - confidenceSummary: z.string(), - }).describe("Required populated planning meeting. 
Empty, null, or missing planningMeeting is not acceptable."), - tasks: z.array(z.object({ - taskId: z.string(), - title: z.string(), - description: z.string(), - estimate: z.string(), - files: z.array(z.string()), - verify: z.string(), - inputs: z.array(z.string()), - expectedOutput: z.array(z.string()), - observabilityImpact: z.string().optional(), - })).describe("Planned tasks for the slice"), - successCriteria: z.string().optional(), - proofLevel: z.string().optional(), - integrationClosure: z.string().optional(), - observabilityImpact: z.string().optional(), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. M001)"), + sliceId: z.string().describe("Slice ID (e.g. S01)"), + goal: z.string().describe("Slice goal"), + adversarialReview: z + .object({ + partner: z.string(), + combatant: z.string(), + architect: z.string(), + }) + .optional() + .describe( + "Adversarial review summary with partner, combatant, and architect perspectives", + ), + planningMeeting: z + .object({ + trigger: z.string(), + pm: z.string(), + userAdvocate: z.string().optional(), + customerPanel: z.string().optional(), + business: z.string().optional(), + researcher: z.string(), + deliveryLead: z.string().optional(), + partner: z.string(), + combatant: z.string(), + architect: z.string(), + moderator: z.string(), + recommendedRoute: z.enum(["discussing", "researching", "planning"]), + confidenceSummary: z.string(), + }) + .describe( + "Required populated planning meeting. 
Empty, null, or missing planningMeeting is not acceptable.", + ), + tasks: z + .array( + z.object({ + taskId: z.string(), + title: z.string(), + description: z.string(), + estimate: z.string(), + files: z.array(z.string()), + verify: z.string(), + inputs: z.array(z.string()), + expectedOutput: z.array(z.string()), + observabilityImpact: z.string().optional(), + }), + ) + .describe("Planned tasks for the slice"), + successCriteria: z.string().optional(), + proofLevel: z.string().optional(), + integrationClosure: z.string().optional(), + observabilityImpact: z.string().optional(), }; const planSliceSchema = z.object(planSliceParams); const completeMilestoneParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - title: z.string().describe("Milestone title"), - oneLiner: z.string().describe("One-sentence summary of what the milestone achieved"), - narrative: z.string().describe("Detailed narrative of what happened during the milestone"), - verificationPassed: z.boolean().describe("Must be true after milestone verification succeeds"), - successCriteriaResults: z.string().optional(), - definitionOfDoneResults: z.string().optional(), - requirementOutcomes: z.string().optional(), - keyDecisions: z.array(z.string()).optional(), - keyFiles: z.array(z.string()).optional(), - lessonsLearned: z.array(z.string()).optional(), - followUps: z.string().optional(), - deviations: z.string().optional(), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), + title: z.string().describe("Milestone title"), + oneLiner: z + .string() + .describe("One-sentence summary of what the milestone achieved"), + narrative: z + .string() + .describe("Detailed narrative of what happened during the milestone"), + verificationPassed: z + .boolean() + .describe("Must be true after milestone verification succeeds"), + successCriteriaResults: z.string().optional(), + definitionOfDoneResults: z.string().optional(), + requirementOutcomes: z.string().optional(), + keyDecisions: z.array(z.string()).optional(), + keyFiles: z.array(z.string()).optional(), + lessonsLearned: z.array(z.string()).optional(), + followUps: z.string().optional(), + deviations: z.string().optional(), }; const completeMilestoneSchema = z.object(completeMilestoneParams); const validateMilestoneParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - verdict: z.enum(["pass", "needs-attention", "needs-remediation"]).describe("Validation verdict"), - remediationRound: z.number().describe("Remediation round (0 for first validation)"), - successCriteriaChecklist: z.string().describe("Markdown checklist of success criteria with evidence"), - sliceDeliveryAudit: z.string().describe("Markdown auditing each slice's claimed vs delivered output"), - crossSliceIntegration: z.string().describe("Markdown describing cross-slice issues or closure"), - requirementCoverage: z.string().describe("Markdown describing requirement coverage and gaps"), - verificationClasses: z.string().optional(), - verdictRationale: z.string().describe("Why this verdict was chosen"), - remediationPlan: z.string().optional(), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), + verdict: z + .enum(["pass", "needs-attention", "needs-remediation"]) + .describe("Validation verdict"), + remediationRound: z + .number() + .describe("Remediation round (0 for first validation)"), + successCriteriaChecklist: z + .string() + .describe("Markdown checklist of success criteria with evidence"), + sliceDeliveryAudit: z + .string() + .describe("Markdown auditing each slice's claimed vs delivered output"), + crossSliceIntegration: z + .string() + .describe("Markdown describing cross-slice issues or closure"), + requirementCoverage: z + .string() + .describe("Markdown describing requirement coverage and gaps"), + verificationClasses: z.string().optional(), + verdictRationale: z.string().describe("Why this verdict was chosen"), + remediationPlan: z.string().optional(), }; const validateMilestoneSchema = z.object(validateMilestoneParams); const roadmapSliceChangeSchema = z.object({ - sliceId: z.string(), - title: z.string(), - risk: z.string().optional(), - depends: z.array(z.string()).optional(), - demo: z.string().optional(), + sliceId: z.string(), + title: z.string(), + risk: z.string().optional(), + depends: z.array(z.string()).optional(), + demo: z.string().optional(), }); const reassessRoadmapParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - completedSliceId: z.string().describe("Slice ID that just completed"), - verdict: z.string().describe("Assessment verdict such as roadmap-confirmed or roadmap-adjusted"), - assessment: z.string().describe("Assessment text explaining the roadmap decision"), - sliceChanges: z.object({ - modified: z.array(roadmapSliceChangeSchema), - added: z.array(roadmapSliceChangeSchema), - removed: z.array(z.string()), - }).describe("Slice changes to apply"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), + completedSliceId: z.string().describe("Slice ID that just completed"), + verdict: z + .string() + .describe( + "Assessment verdict such as roadmap-confirmed or roadmap-adjusted", + ), + assessment: z + .string() + .describe("Assessment text explaining the roadmap decision"), + sliceChanges: z + .object({ + modified: z.array(roadmapSliceChangeSchema), + added: z.array(roadmapSliceChangeSchema), + removed: z.array(z.string()), + }) + .describe("Slice changes to apply"), }; const reassessRoadmapSchema = z.object(reassessRoadmapParams); const saveGateResultParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - sliceId: z.string().describe("Slice ID (e.g. S01)"), - gateId: z.enum(["Q3", "Q4", "Q5", "Q6", "Q7", "Q8", "MV01", "MV02", "MV03", "MV04"]).describe("Gate ID"), - taskId: z.string().optional().describe("Task ID for task-scoped gates"), - verdict: z.enum(["pass", "flag", "omitted"]).describe("Gate verdict"), - rationale: z.string().describe("One-sentence justification"), - findings: z.string().optional().describe("Detailed markdown findings"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. M001)"), + sliceId: z.string().describe("Slice ID (e.g. S01)"), + gateId: z + .enum(["Q3", "Q4", "Q5", "Q6", "Q7", "Q8", "MV01", "MV02", "MV03", "MV04"]) + .describe("Gate ID"), + taskId: z.string().optional().describe("Task ID for task-scoped gates"), + verdict: z.enum(["pass", "flag", "omitted"]).describe("Gate verdict"), + rationale: z.string().describe("One-sentence justification"), + findings: z.string().optional().describe("Detailed markdown findings"), }; const saveGateResultSchema = z.object(saveGateResultParams); const replanSliceParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - sliceId: z.string().describe("Slice ID (e.g. 
S01)"), - blockerTaskId: z.string().describe("Task ID that discovered the blocker"), - blockerDescription: z.string().describe("Description of the blocker"), - whatChanged: z.string().describe("Summary of what changed in the plan"), - goal: z.string().optional().describe("Updated slice goal when the replan changes the slice contract"), - successCriteria: z.string().optional().describe("Updated slice success criteria block"), - proofLevel: z.string().optional().describe("Updated slice proof level"), - integrationClosure: z.string().optional().describe("Updated slice integration closure"), - observabilityImpact: z.string().optional().describe("Updated slice observability impact"), - adversarialReview: z.object({ - partner: z.string(), - combatant: z.string(), - architect: z.string(), - }).optional().describe("Updated adversarial review summary for the replanned slice"), - planningMeeting: z.object({ - trigger: z.string(), - pm: z.string(), - researcher: z.string(), - partner: z.string(), - combatant: z.string(), - architect: z.string(), - moderator: z.string(), - recommendedRoute: z.enum(["discussing", "researching", "planning"]), - confidenceSummary: z.string(), - }).optional().describe("Updated structured planning meeting artifact for the replanned slice"), - updatedTasks: z.array(z.object({ - taskId: z.string(), - title: z.string(), - description: z.string(), - estimate: z.string(), - files: z.array(z.string()), - verify: z.string(), - inputs: z.array(z.string()), - expectedOutput: z.array(z.string()), - fullPlanMd: z.string().optional(), - })).describe("Tasks to upsert into the replanned slice"), - removedTaskIds: z.array(z.string()).describe("Task IDs to remove from the slice"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. M001)"), + sliceId: z.string().describe("Slice ID (e.g. 
S01)"), + blockerTaskId: z.string().describe("Task ID that discovered the blocker"), + blockerDescription: z.string().describe("Description of the blocker"), + whatChanged: z.string().describe("Summary of what changed in the plan"), + goal: z + .string() + .optional() + .describe("Updated slice goal when the replan changes the slice contract"), + successCriteria: z + .string() + .optional() + .describe("Updated slice success criteria block"), + proofLevel: z.string().optional().describe("Updated slice proof level"), + integrationClosure: z + .string() + .optional() + .describe("Updated slice integration closure"), + observabilityImpact: z + .string() + .optional() + .describe("Updated slice observability impact"), + adversarialReview: z + .object({ + partner: z.string(), + combatant: z.string(), + architect: z.string(), + }) + .optional() + .describe("Updated adversarial review summary for the replanned slice"), + planningMeeting: z + .object({ + trigger: z.string(), + pm: z.string(), + researcher: z.string(), + partner: z.string(), + combatant: z.string(), + architect: z.string(), + moderator: z.string(), + recommendedRoute: z.enum(["discussing", "researching", "planning"]), + confidenceSummary: z.string(), + }) + .optional() + .describe( + "Updated structured planning meeting artifact for the replanned slice", + ), + updatedTasks: z + .array( + z.object({ + taskId: z.string(), + title: z.string(), + description: z.string(), + estimate: z.string(), + files: z.array(z.string()), + verify: z.string(), + inputs: z.array(z.string()), + expectedOutput: z.array(z.string()), + fullPlanMd: z.string().optional(), + }), + ) + .describe("Tasks to upsert into the replanned slice"), + removedTaskIds: z + .array(z.string()) + .describe("Task IDs to remove from the slice"), }; const replanSliceSchema = z.object(replanSliceParams); const sliceCompleteParams = { - projectDir: projectDirParam, - sliceId: z.string().describe("Slice ID (e.g. 
S01)"), - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - sliceTitle: z.string().describe("Title of the slice"), - oneLiner: z.string().describe("One-line summary of what the slice accomplished"), - narrative: z.string().describe("Detailed narrative of what happened across all tasks"), - verification: z.string().describe("What was verified across all tasks"), - uatContent: z.string().describe("UAT test content (markdown body)"), - deviations: z.string().optional(), - knownLimitations: z.string().optional(), - followUps: z.string().optional(), - keyFiles: z.union([z.array(z.string()), z.string()]).optional(), - keyDecisions: z.union([z.array(z.string()), z.string()]).optional(), - patternsEstablished: z.union([z.array(z.string()), z.string()]).optional(), - observabilitySurfaces: z.union([z.array(z.string()), z.string()]).optional(), - provides: z.union([z.array(z.string()), z.string()]).optional(), - requirementsSurfaced: z.union([z.array(z.string()), z.string()]).optional(), - drillDownPaths: z.union([z.array(z.string()), z.string()]).optional(), - affects: z.union([z.array(z.string()), z.string()]).optional(), - requirementsAdvanced: z.array(z.union([ - z.object({ id: z.string(), how: z.string() }), - z.string(), - ])).optional(), - requirementsValidated: z.array(z.union([ - z.object({ id: z.string(), proof: z.string() }), - z.string(), - ])).optional(), - requirementsInvalidated: z.array(z.union([ - z.object({ id: z.string(), what: z.string() }), - z.string(), - ])).optional(), - filesModified: z.array(z.union([ - z.object({ path: z.string(), description: z.string() }), - z.string(), - ])).optional(), - requires: z.array(z.union([ - z.object({ slice: z.string(), provides: z.string() }), - z.string(), - ])).optional(), + projectDir: projectDirParam, + sliceId: z.string().describe("Slice ID (e.g. S01)"), + milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), + sliceTitle: z.string().describe("Title of the slice"), + oneLiner: z + .string() + .describe("One-line summary of what the slice accomplished"), + narrative: z + .string() + .describe("Detailed narrative of what happened across all tasks"), + verification: z.string().describe("What was verified across all tasks"), + uatContent: z.string().describe("UAT test content (markdown body)"), + deviations: z.string().optional(), + knownLimitations: z.string().optional(), + followUps: z.string().optional(), + keyFiles: z.union([z.array(z.string()), z.string()]).optional(), + keyDecisions: z.union([z.array(z.string()), z.string()]).optional(), + patternsEstablished: z.union([z.array(z.string()), z.string()]).optional(), + observabilitySurfaces: z.union([z.array(z.string()), z.string()]).optional(), + provides: z.union([z.array(z.string()), z.string()]).optional(), + requirementsSurfaced: z.union([z.array(z.string()), z.string()]).optional(), + drillDownPaths: z.union([z.array(z.string()), z.string()]).optional(), + affects: z.union([z.array(z.string()), z.string()]).optional(), + requirementsAdvanced: z + .array(z.union([z.object({ id: z.string(), how: z.string() }), z.string()])) + .optional(), + requirementsValidated: z + .array( + z.union([z.object({ id: z.string(), proof: z.string() }), z.string()]), + ) + .optional(), + requirementsInvalidated: z + .array( + z.union([z.object({ id: z.string(), what: z.string() }), z.string()]), + ) + .optional(), + filesModified: z + .array( + z.union([ + z.object({ path: z.string(), description: z.string() }), + z.string(), + ]), + ) + .optional(), + requires: z + .array( + z.union([ + z.object({ slice: z.string(), provides: z.string() }), + z.string(), + ]), + ) + .optional(), }; const sliceCompleteSchema = z.object(sliceCompleteParams); const summarySaveParams = { - projectDir: projectDirParam, - milestone_id: z.string().describe("Milestone ID (e.g. M001)"), - slice_id: z.string().optional().describe("Slice ID (e.g. 
S01)"), - task_id: z.string().optional().describe("Task ID (e.g. T01)"), - artifact_type: z.string().describe("Artifact type to save (SUMMARY, RESEARCH, CONTEXT, ASSESSMENT, CONTEXT-DRAFT)"), - content: z.string().describe("The full markdown content of the artifact"), + projectDir: projectDirParam, + milestone_id: z.string().describe("Milestone ID (e.g. M001)"), + slice_id: z.string().optional().describe("Slice ID (e.g. S01)"), + task_id: z.string().optional().describe("Task ID (e.g. T01)"), + artifact_type: z + .string() + .describe( + "Artifact type to save (SUMMARY, RESEARCH, CONTEXT, ASSESSMENT, CONTEXT-DRAFT)", + ), + content: z.string().describe("The full markdown content of the artifact"), }; const summarySaveSchema = z.object(summarySaveParams); const decisionSaveParams = { - projectDir: projectDirParam, - scope: z.string().describe("Scope of the decision (e.g. architecture, library, observability)"), - decision: z.string().describe("What is being decided"), - choice: z.string().describe("The choice made"), - rationale: z.string().describe("Why this choice was made"), - revisable: z.string().optional().describe("Whether this can be revisited"), - when_context: z.string().optional().describe("When/context for the decision"), - made_by: z.enum(["human", "agent", "collaborative"]).optional().describe("Who made the decision"), + projectDir: projectDirParam, + scope: z + .string() + .describe( + "Scope of the decision (e.g. 
architecture, library, observability)", + ), + decision: z.string().describe("What is being decided"), + choice: z.string().describe("The choice made"), + rationale: z.string().describe("Why this choice was made"), + revisable: z.string().optional().describe("Whether this can be revisited"), + when_context: z.string().optional().describe("When/context for the decision"), + made_by: z + .enum(["human", "agent", "collaborative"]) + .optional() + .describe("Who made the decision"), }; const decisionSaveSchema = z.object(decisionSaveParams); const requirementUpdateParams = { - projectDir: projectDirParam, - id: z.string().describe("Requirement ID (e.g. R001)"), - status: z.string().optional().describe("New status"), - validation: z.string().optional().describe("Validation criteria or proof"), - notes: z.string().optional().describe("Additional notes"), - description: z.string().optional().describe("Updated description"), - primary_owner: z.string().optional().describe("Primary owning slice"), - supporting_slices: z.string().optional().describe("Supporting slices"), + projectDir: projectDirParam, + id: z.string().describe("Requirement ID (e.g. 
R001)"), + status: z.string().optional().describe("New status"), + validation: z.string().optional().describe("Validation criteria or proof"), + notes: z.string().optional().describe("Additional notes"), + description: z.string().optional().describe("Updated description"), + primary_owner: z.string().optional().describe("Primary owning slice"), + supporting_slices: z.string().optional().describe("Supporting slices"), }; const requirementUpdateSchema = z.object(requirementUpdateParams); const requirementSaveParams = { - projectDir: projectDirParam, - class: z.string().describe("Requirement class"), - description: z.string().describe("Short description of the requirement"), - why: z.string().describe("Why this requirement matters"), - source: z.string().describe("Origin of the requirement"), - status: z.string().optional().describe("Requirement status"), - primary_owner: z.string().optional().describe("Primary owning slice"), - supporting_slices: z.string().optional().describe("Supporting slices"), - validation: z.string().optional().describe("Validation criteria"), - notes: z.string().optional().describe("Additional notes"), + projectDir: projectDirParam, + class: z.string().describe("Requirement class"), + description: z.string().describe("Short description of the requirement"), + why: z.string().describe("Why this requirement matters"), + source: z.string().describe("Origin of the requirement"), + status: z.string().optional().describe("Requirement status"), + primary_owner: z.string().optional().describe("Primary owning slice"), + supporting_slices: z.string().optional().describe("Supporting slices"), + validation: z.string().optional().describe("Validation criteria"), + notes: z.string().optional().describe("Additional notes"), }; const requirementSaveSchema = z.object(requirementSaveParams); const milestoneGenerateIdParams = { - projectDir: projectDirParam, + projectDir: projectDirParam, }; const milestoneGenerateIdSchema = z.object(milestoneGenerateIdParams); 
const planTaskParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - sliceId: z.string().describe("Slice ID (e.g. S01)"), - taskId: z.string().describe("Task ID (e.g. T01)"), - title: z.string().describe("Task title"), - description: z.string().describe("Task description / steps block"), - estimate: z.string().describe("Task estimate"), - files: z.array(z.string()).describe("Files likely touched"), - verify: z.string().describe("Verification command or block"), - inputs: z.array(z.string()).describe("Input files or references"), - expectedOutput: z.array(z.string()).describe("Expected output files or artifacts"), - observabilityImpact: z.string().optional().describe("Task observability impact"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID (e.g. M001)"), + sliceId: z.string().describe("Slice ID (e.g. S01)"), + taskId: z.string().describe("Task ID (e.g. T01)"), + title: z.string().describe("Task title"), + description: z.string().describe("Task description / steps block"), + estimate: z.string().describe("Task estimate"), + files: z.array(z.string()).describe("Files likely touched"), + verify: z.string().describe("Verification command or block"), + inputs: z.array(z.string()).describe("Input files or references"), + expectedOutput: z + .array(z.string()) + .describe("Expected output files or artifacts"), + observabilityImpact: z + .string() + .optional() + .describe("Task observability impact"), }; const planTaskSchema = z.object(planTaskParams); const skipSliceParams = { - projectDir: projectDirParam, - sliceId: z.string().describe("Slice ID (e.g. S02)"), - milestoneId: z.string().describe("Milestone ID (e.g. M003)"), - reason: z.string().optional().describe("Reason for skipping this slice"), + projectDir: projectDirParam, + sliceId: z.string().describe("Slice ID (e.g. S02)"), + milestoneId: z.string().describe("Milestone ID (e.g. 
M003)"), + reason: z.string().optional().describe("Reason for skipping this slice"), }; const skipSliceSchema = z.object(skipSliceParams); const taskCompleteParams = { - projectDir: projectDirParam, - taskId: z.string().describe("Task ID (e.g. T01)"), - sliceId: z.string().describe("Slice ID (e.g. S01)"), - milestoneId: z.string().describe("Milestone ID (e.g. M001)"), - oneLiner: z.string().describe("One-line summary of what was accomplished"), - narrative: z.string().describe("Detailed narrative of what happened during the task"), - verification: z.string().describe("What was verified and how"), - deviations: z.string().optional().describe("Deviations from the task plan"), - knownIssues: z.string().optional().describe("Known issues discovered but not fixed"), - keyFiles: z.array(z.string()).optional().describe("List of key files created or modified"), - keyDecisions: z.array(z.string()).optional().describe("List of key decisions made during this task"), - blockerDiscovered: z.boolean().optional().describe("Whether a plan-invalidating blocker was discovered"), - verificationEvidence: z.array(z.union([ - z.object({ - command: z.string(), - exitCode: z.number(), - verdict: z.string(), - durationMs: z.number(), - }), - z.string(), - ])).optional().describe("Verification evidence entries"), + projectDir: projectDirParam, + taskId: z.string().describe("Task ID (e.g. T01)"), + sliceId: z.string().describe("Slice ID (e.g. S01)"), + milestoneId: z.string().describe("Milestone ID (e.g. 
M001)"), + oneLiner: z.string().describe("One-line summary of what was accomplished"), + narrative: z + .string() + .describe("Detailed narrative of what happened during the task"), + verification: z.string().describe("What was verified and how"), + deviations: z.string().optional().describe("Deviations from the task plan"), + knownIssues: z + .string() + .optional() + .describe("Known issues discovered but not fixed"), + keyFiles: z + .array(z.string()) + .optional() + .describe("List of key files created or modified"), + keyDecisions: z + .array(z.string()) + .optional() + .describe("List of key decisions made during this task"), + blockerDiscovered: z + .boolean() + .optional() + .describe("Whether a plan-invalidating blocker was discovered"), + verificationEvidence: z + .array( + z.union([ + z.object({ + command: z.string(), + exitCode: z.number(), + verdict: z.string(), + durationMs: z.number(), + }), + z.string(), + ]), + ) + .optional() + .describe("Verification evidence entries"), }; const taskCompleteSchema = z.object(taskCompleteParams); const milestoneStatusParams = { - projectDir: projectDirParam, - milestoneId: z.string().describe("Milestone ID to query (e.g. M001)"), + projectDir: projectDirParam, + milestoneId: z.string().describe("Milestone ID to query (e.g. 
M001)"), }; const milestoneStatusSchema = z.object(milestoneStatusParams); const journalQueryParams = { - projectDir: projectDirParam, - flowId: z.string().optional().describe("Filter by flow ID"), - unitId: z.string().optional().describe("Filter by unit ID"), - rule: z.string().optional().describe("Filter by rule name"), - eventType: z.string().optional().describe("Filter by event type"), - after: z.string().optional().describe("ISO-8601 lower bound (inclusive)"), - before: z.string().optional().describe("ISO-8601 upper bound (inclusive)"), - limit: z.number().optional().describe("Maximum entries to return"), + projectDir: projectDirParam, + flowId: z.string().optional().describe("Filter by flow ID"), + unitId: z.string().optional().describe("Filter by unit ID"), + rule: z.string().optional().describe("Filter by rule name"), + eventType: z.string().optional().describe("Filter by event type"), + after: z.string().optional().describe("ISO-8601 lower bound (inclusive)"), + before: z.string().optional().describe("ISO-8601 upper bound (inclusive)"), + limit: z.number().optional().describe("Maximum entries to return"), }; const journalQuerySchema = z.object(journalQueryParams); export function registerWorkflowTools(server: McpToolServer): void { - server.tool( - "sf_decision_save", - "Record a project decision to the SF database and regenerate DECISIONS.md.", - decisionSaveParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(decisionSaveSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_decision_save", projectDir); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { saveDecisionToDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return saveDecisionToDb(params, projectDir); - }); - return { content: [{ type: "text" as const, text: `Saved decision ${result.id}` }] }; - }, - ); + server.tool( + "sf_decision_save", + "Record a project 
decision to the SF database and regenerate DECISIONS.md.", + decisionSaveParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(decisionSaveSchema, args); + const { projectDir, ...params } = parsed; + await enforceWorkflowWriteGate("sf_decision_save", projectDir); + const result = await runSerializedWorkflowDbOperation( + projectDir, + async () => { + const { saveDecisionToDb } = await importLocalModule( + "../../../src/resources/extensions/sf/db-writer.js", + ); + return saveDecisionToDb(params, projectDir); + }, + ); + return { + content: [ + { type: "text" as const, text: `Saved decision ${result.id}` }, + ], + }; + }, + ); - server.tool( - "sf_save_decision", - "Alias for sf_decision_save. Record a project decision to the SF database and regenerate DECISIONS.md.", - decisionSaveParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(decisionSaveSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_decision_save", projectDir); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { saveDecisionToDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return saveDecisionToDb(params, projectDir); - }); - return { content: [{ type: "text" as const, text: `Saved decision ${result.id}` }] }; - }, - ); + server.tool( + "sf_requirement_update", + "Update an existing requirement in the SF database and regenerate REQUIREMENTS.md.", + requirementUpdateParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(requirementUpdateSchema, args); + const { projectDir, id, ...updates } = parsed; + await enforceWorkflowWriteGate("sf_requirement_update", projectDir); + await runSerializedWorkflowDbOperation(projectDir, async () => { + const { updateRequirementInDb } = await importLocalModule( + "../../../src/resources/extensions/sf/db-writer.js", + ); + return updateRequirementInDb(id, updates, projectDir); + }); + return { + 
content: [{ type: "text" as const, text: `Updated requirement ${id}` }], + }; + }, + ); - server.tool( - "sf_requirement_update", - "Update an existing requirement in the SF database and regenerate REQUIREMENTS.md.", - requirementUpdateParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(requirementUpdateSchema, args); - const { projectDir, id, ...updates } = parsed; - await enforceWorkflowWriteGate("sf_requirement_update", projectDir); - await runSerializedWorkflowDbOperation(projectDir, async () => { - const { updateRequirementInDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return updateRequirementInDb(id, updates, projectDir); - }); - return { content: [{ type: "text" as const, text: `Updated requirement ${id}` }] }; - }, - ); + server.tool( + "sf_requirement_save", + "Record a new requirement to the SF database and regenerate REQUIREMENTS.md.", + requirementSaveParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(requirementSaveSchema, args); + const { projectDir, ...params } = parsed; + await enforceWorkflowWriteGate("sf_requirement_save", projectDir); + const result = await runSerializedWorkflowDbOperation( + projectDir, + async () => { + const { saveRequirementToDb } = await importLocalModule( + "../../../src/resources/extensions/sf/db-writer.js", + ); + return saveRequirementToDb(params, projectDir); + }, + ); + return { + content: [ + { type: "text" as const, text: `Saved requirement ${result.id}` }, + ], + }; + }, + ); - server.tool( - "sf_update_requirement", - "Alias for sf_requirement_update. 
Update an existing requirement in the SF database and regenerate REQUIREMENTS.md.", - requirementUpdateParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(requirementUpdateSchema, args); - const { projectDir, id, ...updates } = parsed; - await enforceWorkflowWriteGate("sf_requirement_update", projectDir); - await runSerializedWorkflowDbOperation(projectDir, async () => { - const { updateRequirementInDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return updateRequirementInDb(id, updates, projectDir); - }); - return { content: [{ type: "text" as const, text: `Updated requirement ${id}` }] }; - }, - ); + server.tool( + "sf_milestone_generate_id", + "Generate the next milestone ID for a new SF milestone.", + milestoneGenerateIdParams, + async (args: Record) => { + const { projectDir } = parseWorkflowArgs(milestoneGenerateIdSchema, args); + await enforceWorkflowWriteGate("sf_milestone_generate_id", projectDir); + const id = await runSerializedWorkflowDbOperation( + projectDir, + async () => { + const { + claimReservedId, + findMilestoneIds, + getReservedMilestoneIds, + nextMilestoneId, + } = await importLocalModule( + "../../../src/resources/extensions/sf/milestone-ids.js", + ); + const reserved = claimReservedId(); + if (reserved) { + await ensureMilestoneDbRow(reserved); + return reserved; + } + const allIds = [ + ...new Set([ + ...findMilestoneIds(projectDir), + ...getReservedMilestoneIds(), + ]), + ]; + const nextId = nextMilestoneId(allIds); + await ensureMilestoneDbRow(nextId); + return nextId; + }, + ); + return { content: [{ type: "text" as const, text: id }] }; + }, + ); - server.tool( - "sf_requirement_save", - "Record a new requirement to the SF database and regenerate REQUIREMENTS.md.", - requirementSaveParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(requirementSaveSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_requirement_save", 
projectDir); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { saveRequirementToDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return saveRequirementToDb(params, projectDir); - }); - return { content: [{ type: "text" as const, text: `Saved requirement ${result.id}` }] }; - }, - ); + server.tool( + "sf_plan_milestone", + "Write milestone planning state to the SF database and render ROADMAP.md from DB.", + planMilestoneParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(planMilestoneSchema, args); + const { projectDir, ...params } = parsed; + await enforceWorkflowWriteGate( + "sf_plan_milestone", + projectDir, + params.milestoneId, + ); + const { executePlanMilestone } = await getWorkflowToolExecutors(); + return runSerializedWorkflowOperation(() => + executePlanMilestone(params, projectDir), + ); + }, + ); - server.tool( - "sf_save_requirement", - "Alias for sf_requirement_save. Record a new requirement to the SF database and regenerate REQUIREMENTS.md.", - requirementSaveParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(requirementSaveSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_requirement_save", projectDir); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { saveRequirementToDb } = await importLocalModule("../../../src/resources/extensions/sf/db-writer.js"); - return saveRequirementToDb(params, projectDir); - }); - return { content: [{ type: "text" as const, text: `Saved requirement ${result.id}` }] }; - }, - ); + server.tool( + "sf_plan_slice", + "Write slice/task planning state to the SF database and render plan artifacts from DB.", + planSliceParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(planSliceSchema, args); + const { projectDir, ...params } = parsed; + await enforceWorkflowWriteGate( + "sf_plan_slice", + projectDir, + 
params.milestoneId, + ); + const { executePlanSlice } = await getWorkflowToolExecutors(); + return runSerializedWorkflowOperation(() => + executePlanSlice(params, projectDir), + ); + }, + ); - server.tool( - "sf_milestone_generate_id", - "Generate the next milestone ID for a new SF milestone.", - milestoneGenerateIdParams, - async (args: Record) => { - const { projectDir } = parseWorkflowArgs(milestoneGenerateIdSchema, args); - await enforceWorkflowWriteGate("sf_milestone_generate_id", projectDir); - const id = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { - claimReservedId, - findMilestoneIds, - getReservedMilestoneIds, - nextMilestoneId, - } = await importLocalModule("../../../src/resources/extensions/sf/milestone-ids.js"); - const reserved = claimReservedId(); - if (reserved) { - await ensureMilestoneDbRow(reserved); - return reserved; - } - const allIds = [...new Set([...findMilestoneIds(projectDir), ...getReservedMilestoneIds()])]; - const nextId = nextMilestoneId(allIds); - await ensureMilestoneDbRow(nextId); - return nextId; - }); - return { content: [{ type: "text" as const, text: id }] }; - }, - ); + server.tool( + "sf_plan_task", + "Write task planning state to the SF database and render tasks/T##-PLAN.md from DB.", + planTaskParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(planTaskSchema, args); + const { projectDir, ...params } = parsed; + await enforceWorkflowWriteGate( + "sf_plan_task", + projectDir, + params.milestoneId, + ); + const result = await runSerializedWorkflowDbOperation( + projectDir, + async () => { + const { handlePlanTask } = await importLocalModule( + "../../../src/resources/extensions/sf/tools/plan-task.js", + ); + return handlePlanTask(params, projectDir); + }, + ); + if ("error" in result) { + throw new Error(result.error); + } + return { + content: [ + { + type: "text" as const, + text: `Planned task ${result.taskId} (${result.sliceId}/${result.milestoneId})`, + }, + ], + }; + }, + 
); - server.tool( - "sf_plan_milestone", - "Write milestone planning state to the SF database and render ROADMAP.md from DB.", - planMilestoneParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(planMilestoneSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_plan_milestone", projectDir, params.milestoneId); - const { executePlanMilestone } = await getWorkflowToolExecutors(); - return runSerializedWorkflowOperation(() => executePlanMilestone(params, projectDir)); - }, - ); + server.tool( + "sf_replan_slice", + "Replan a slice after a blocker is discovered, preserving completed tasks and re-rendering PLAN.md + REPLAN.md.", + replanSliceParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(replanSliceSchema, args); + return handleReplanSlice(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_plan_slice", - "Write slice/task planning state to the SF database and render plan artifacts from DB.", - planSliceParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(planSliceSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_plan_slice", projectDir, params.milestoneId); - const { executePlanSlice } = await getWorkflowToolExecutors(); - return runSerializedWorkflowOperation(() => executePlanSlice(params, projectDir)); - }, - ); + server.tool( + "sf_slice_complete", + "Record a completed slice to the SF database, render SUMMARY.md + UAT.md, and update roadmap projection.", + sliceCompleteParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(sliceCompleteSchema, args); + return handleSliceComplete(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_plan_task", - "Write task planning state to the SF database and render tasks/T##-PLAN.md from DB.", - planTaskParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(planTaskSchema, args); - const { projectDir, ...params } = parsed; - await 
enforceWorkflowWriteGate("sf_plan_task", projectDir, params.milestoneId); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { handlePlanTask } = await importLocalModule("../../../src/resources/extensions/sf/tools/plan-task.js"); - return handlePlanTask(params, projectDir); - }); - if ("error" in result) { - throw new Error(result.error); - } - return { - content: [{ type: "text" as const, text: `Planned task ${result.taskId} (${result.sliceId}/${result.milestoneId})` }], - }; - }, - ); + server.tool( + "sf_skip_slice", + "Mark a slice as skipped so auto-mode advances past it without executing.", + skipSliceParams, + async (args: Record) => { + const { projectDir, milestoneId, sliceId, reason } = parseWorkflowArgs( + skipSliceSchema, + args, + ); + await enforceWorkflowWriteGate("sf_skip_slice", projectDir, milestoneId); + await runSerializedWorkflowDbOperation(projectDir, async () => { + const { getSlice, updateSliceStatus } = await importLocalModule( + "../../../src/resources/extensions/sf/sf-db.js", + ); + const { invalidateStateCache } = await importLocalModule( + "../../../src/resources/extensions/sf/state.js", + ); + const { rebuildState } = await importLocalModule( + "../../../src/resources/extensions/sf/doctor.js", + ); + const slice = getSlice(milestoneId, sliceId); + if (!slice) { + throw new Error( + `Slice ${sliceId} not found in milestone ${milestoneId}`, + ); + } + if (slice.status === "complete" || slice.status === "done") { + throw new Error( + `Slice ${sliceId} is already complete and cannot be skipped`, + ); + } + if (slice.status !== "skipped") { + updateSliceStatus(milestoneId, sliceId, "skipped"); + invalidateStateCache(); + await rebuildState(projectDir); + } + }); + return { + content: [ + { + type: "text" as const, + text: `Skipped slice ${sliceId} (${milestoneId}). Reason: ${reason ?? "User-directed skip"}.`, + }, + ], + }; + }, + ); - server.tool( - "sf_task_plan", - "Alias for sf_plan_task. 
Write task planning state to the SF database and render tasks/T##-PLAN.md from DB.", - planTaskParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(planTaskSchema, args); - const { projectDir, ...params } = parsed; - await enforceWorkflowWriteGate("sf_plan_task", projectDir, params.milestoneId); - const result = await runSerializedWorkflowDbOperation(projectDir, async () => { - const { handlePlanTask } = await importLocalModule("../../../src/resources/extensions/sf/tools/plan-task.js"); - return handlePlanTask(params, projectDir); - }); - if ("error" in result) { - throw new Error(result.error); - } - return { - content: [{ type: "text" as const, text: `Planned task ${result.taskId} (${result.sliceId}/${result.milestoneId})` }], - }; - }, - ); + server.tool( + "sf_complete_milestone", + "Record a completed milestone to the SF database and render its SUMMARY.md.", + completeMilestoneParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(completeMilestoneSchema, args); + return handleCompleteMilestone(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_replan_slice", - "Replan a slice after a blocker is discovered, preserving completed tasks and re-rendering PLAN.md + REPLAN.md.", - replanSliceParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(replanSliceSchema, args); - return handleReplanSlice(parsed.projectDir, parsed); - }, - ); + server.tool( + "sf_validate_milestone", + "Validate a milestone, persist validation results to the SF database, and render VALIDATION.md.", + validateMilestoneParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(validateMilestoneSchema, args); + return handleValidateMilestone(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_slice_replan", - "Alias for sf_replan_slice. 
Replan a slice after a blocker is discovered.", - replanSliceParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(replanSliceSchema, args); - return handleReplanSlice(parsed.projectDir, parsed); - }, - ); + server.tool( + "sf_reassess_roadmap", + "Reassess a milestone roadmap after a slice completes, writing ASSESSMENT.md and re-rendering ROADMAP.md.", + reassessRoadmapParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(reassessRoadmapSchema, args); + return handleReassessRoadmap(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_slice_complete", - "Record a completed slice to the SF database, render SUMMARY.md + UAT.md, and update roadmap projection.", - sliceCompleteParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(sliceCompleteSchema, args); - return handleSliceComplete(parsed.projectDir, parsed); - }, - ); + server.tool( + "sf_save_gate_result", + "Save a quality gate result to the SF database.", + saveGateResultParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(saveGateResultSchema, args); + return handleSaveGateResult(parsed.projectDir, parsed); + }, + ); - server.tool( - "sf_skip_slice", - "Mark a slice as skipped so auto-mode advances past it without executing.", - skipSliceParams, - async (args: Record) => { - const { projectDir, milestoneId, sliceId, reason } = parseWorkflowArgs(skipSliceSchema, args); - await enforceWorkflowWriteGate("sf_skip_slice", projectDir, milestoneId); - await runSerializedWorkflowDbOperation(projectDir, async () => { - const { getSlice, updateSliceStatus } = await importLocalModule("../../../src/resources/extensions/sf/sf-db.js"); - const { invalidateStateCache } = await importLocalModule("../../../src/resources/extensions/sf/state.js"); - const { rebuildState } = await importLocalModule("../../../src/resources/extensions/sf/doctor.js"); - const slice = getSlice(milestoneId, sliceId); - if (!slice) { - throw new Error(`Slice ${sliceId} not found 
in milestone ${milestoneId}`); - } - if (slice.status === "complete" || slice.status === "done") { - throw new Error(`Slice ${sliceId} is already complete and cannot be skipped`); - } - if (slice.status !== "skipped") { - updateSliceStatus(milestoneId, sliceId, "skipped"); - invalidateStateCache(); - await rebuildState(projectDir); - } - }); - return { - content: [{ type: "text" as const, text: `Skipped slice ${sliceId} (${milestoneId}). Reason: ${reason ?? "User-directed skip"}.` }], - }; - }, - ); + server.tool( + "sf_summary_save", + "Save a SF summary/research/context/assessment artifact to the database and disk.", + summarySaveParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(summarySaveSchema, args); + const { + projectDir, + milestone_id, + slice_id, + task_id, + artifact_type, + content, + } = parsed; + await enforceWorkflowWriteGate( + "sf_summary_save", + projectDir, + milestone_id, + ); + const executors = await getWorkflowToolExecutors(); + const supportedArtifactTypes = + getSupportedSummaryArtifactTypes(executors); + if (!supportedArtifactTypes.includes(artifact_type)) { + throw new Error( + `artifact_type must be one of: ${supportedArtifactTypes.join(", ")}`, + ); + } + return runSerializedWorkflowOperation(() => + executors.executeSummarySave( + { milestone_id, slice_id, task_id, artifact_type, content }, + projectDir, + ), + ); + }, + ); - server.tool( - "sf_complete_milestone", - "Record a completed milestone to the SF database and render its SUMMARY.md.", - completeMilestoneParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(completeMilestoneSchema, args); - return handleCompleteMilestone(parsed.projectDir, parsed); - }, - ); + server.tool( + "sf_task_complete", + "Record a completed task to the SF database and render its SUMMARY.md.", + taskCompleteParams, + async (args: Record) => { + const parsed = parseWorkflowArgs(taskCompleteSchema, args); + const { projectDir, ...taskArgs } = parsed; + return 
handleTaskComplete(projectDir, taskArgs); + }, + ); - server.tool( - "sf_milestone_complete", - "Alias for sf_complete_milestone. Record a completed milestone to the SF database and render its SUMMARY.md.", - completeMilestoneParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(completeMilestoneSchema, args); - return handleCompleteMilestone(parsed.projectDir, parsed); - }, - ); + server.tool( + "sf_milestone_status", + "Read the current status of a milestone and all its slices from the SF database.", + milestoneStatusParams, + async (args: Record) => { + const { projectDir, milestoneId } = parseWorkflowArgs( + milestoneStatusSchema, + args, + ); + await enforceWorkflowWriteGate( + "sf_milestone_status", + projectDir, + milestoneId, + ); + const { executeMilestoneStatus } = await getWorkflowToolExecutors(); + return runSerializedWorkflowOperation(() => + executeMilestoneStatus({ milestoneId }, projectDir), + ); + }, + ); - server.tool( - "sf_validate_milestone", - "Validate a milestone, persist validation results to the SF database, and render VALIDATION.md.", - validateMilestoneParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(validateMilestoneSchema, args); - return handleValidateMilestone(parsed.projectDir, parsed); - }, - ); - - server.tool( - "sf_milestone_validate", - "Alias for sf_validate_milestone. 
Validate a milestone and render VALIDATION.md.", - validateMilestoneParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(validateMilestoneSchema, args); - return handleValidateMilestone(parsed.projectDir, parsed); - }, - ); - - server.tool( - "sf_reassess_roadmap", - "Reassess a milestone roadmap after a slice completes, writing ASSESSMENT.md and re-rendering ROADMAP.md.", - reassessRoadmapParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(reassessRoadmapSchema, args); - return handleReassessRoadmap(parsed.projectDir, parsed); - }, - ); - - server.tool( - "sf_roadmap_reassess", - "Alias for sf_reassess_roadmap. Reassess a roadmap after slice completion.", - reassessRoadmapParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(reassessRoadmapSchema, args); - return handleReassessRoadmap(parsed.projectDir, parsed); - }, - ); - - server.tool( - "sf_save_gate_result", - "Save a quality gate result to the SF database.", - saveGateResultParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(saveGateResultSchema, args); - return handleSaveGateResult(parsed.projectDir, parsed); - }, - ); - - server.tool( - "sf_summary_save", - "Save a SF summary/research/context/assessment artifact to the database and disk.", - summarySaveParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(summarySaveSchema, args); - const { projectDir, milestone_id, slice_id, task_id, artifact_type, content } = parsed; - await enforceWorkflowWriteGate("sf_summary_save", projectDir, milestone_id); - const executors = await getWorkflowToolExecutors(); - const supportedArtifactTypes = getSupportedSummaryArtifactTypes(executors); - if (!supportedArtifactTypes.includes(artifact_type)) { - throw new Error( - `artifact_type must be one of: ${supportedArtifactTypes.join(", ")}`, - ); - } - return runSerializedWorkflowOperation(() => - executors.executeSummarySave({ milestone_id, slice_id, task_id, artifact_type, content }, 
projectDir), - ); - }, - ); - - server.tool( - "sf_task_complete", - "Record a completed task to the SF database and render its SUMMARY.md.", - taskCompleteParams, - async (args: Record) => { - const parsed = parseWorkflowArgs(taskCompleteSchema, args); - const { projectDir, ...taskArgs } = parsed; - return handleTaskComplete(projectDir, taskArgs); - }, - ); - - server.tool( - "sf_milestone_status", - "Read the current status of a milestone and all its slices from the SF database.", - milestoneStatusParams, - async (args: Record) => { - const { projectDir, milestoneId } = parseWorkflowArgs(milestoneStatusSchema, args); - await enforceWorkflowWriteGate("sf_milestone_status", projectDir, milestoneId); - const { executeMilestoneStatus } = await getWorkflowToolExecutors(); - return runSerializedWorkflowOperation(() => executeMilestoneStatus({ milestoneId }, projectDir)); - }, - ); - - server.tool( - "sf_journal_query", - "Query the structured event journal for auto-mode iterations.", - journalQueryParams, - async (args: Record) => { - const { projectDir, limit, ...filters } = parseWorkflowArgs(journalQuerySchema, args); - const { queryJournal } = await importLocalModule("../../../src/resources/extensions/sf/journal.js"); - const entries = queryJournal(projectDir, filters).slice(0, limit ?? 100); - if (entries.length === 0) { - return { content: [{ type: "text" as const, text: "No matching journal entries found." }] }; - } - return { content: [{ type: "text" as const, text: JSON.stringify(entries, null, 2) }] }; - }, - ); + server.tool( + "sf_journal_query", + "Query the structured event journal for auto-mode iterations.", + journalQueryParams, + async (args: Record) => { + const { projectDir, limit, ...filters } = parseWorkflowArgs( + journalQuerySchema, + args, + ); + const { queryJournal } = await importLocalModule( + "../../../src/resources/extensions/sf/journal.js", + ); + const entries = queryJournal(projectDir, filters).slice(0, limit ?? 
100); + if (entries.length === 0) { + return { + content: [ + { + type: "text" as const, + text: "No matching journal entries found.", + }, + ], + }; + } + return { + content: [ + { type: "text" as const, text: JSON.stringify(entries, null, 2) }, + ], + }; + }, + ); } diff --git a/src/resources/extensions/sf/bootstrap/db-tools.ts b/src/resources/extensions/sf/bootstrap/db-tools.ts index f56f51b78..ad7f15455 100644 --- a/src/resources/extensions/sf/bootstrap/db-tools.ts +++ b/src/resources/extensions/sf/bootstrap/db-tools.ts @@ -26,31 +26,8 @@ import { import { logError } from "../workflow-logger.js"; import { ensureDbOpen } from "./dynamic-tools.js"; -/** - * Register an alias tool that shares the same execute function as its canonical counterpart. - * The alias description and promptGuidelines direct the LLM to prefer the canonical name. - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -- toolDef shape matches ToolDefinition but typing it fully requires generics -function registerAlias( - pi: ExtensionAPI, - toolDef: any, - aliasName: string, - canonicalName: string, -): void { - pi.registerTool({ - ...toolDef, - name: aliasName, - description: - toolDef.description + - ` (alias for ${canonicalName} — prefer the canonical name)`, - promptGuidelines: [ - `Alias for ${canonicalName} — prefer the canonical name.`, - ], - }); -} - export function registerDbTools(pi: ExtensionAPI): void { - // ─── sf_decision_save (formerly sf_save_decision) ───────────────────── + // ─── sf_decision_save ───────────────────────────────────────────────── const decisionSaveExecute = async ( _toolCallId: string, @@ -175,9 +152,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(decisionSaveTool); - registerAlias(pi, decisionSaveTool, "sf_save_decision", "sf_decision_save"); - // ─── sf_requirement_update (formerly sf_update_requirement) ─────────── + // ─── sf_requirement_update ──────────────────────────────────────────── const 
requirementUpdateExecute = async ( _toolCallId: string, @@ -302,12 +278,6 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(requirementUpdateTool); - registerAlias( - pi, - requirementUpdateTool, - "sf_update_requirement", - "sf_requirement_update", - ); // ─── sf_requirement_save ───────────────────────────────────────────── @@ -434,14 +404,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(requirementSaveTool); - registerAlias( - pi, - requirementSaveTool, - "sf_save_requirement", - "sf_requirement_save", - ); - // ─── sf_summary_save (formerly sf_save_summary) ────────────────────── + // ─── sf_summary_save ────────────────────────────────────────────────── const summarySaveExecute = async ( _toolCallId: string, @@ -510,7 +474,6 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(summarySaveTool); - registerAlias(pi, summarySaveTool, "sf_save_summary", "sf_summary_save"); // ─── sf_milestone_generate_id ──────────────────────────────────────── @@ -817,7 +780,7 @@ export function registerDbTools(pi: ExtensionAPI): void { pi.registerTool(selfReportTool); - // ─── sf_plan_milestone (sf_milestone_plan alias) ───────────────────── + // ─── sf_plan_milestone ──────────────────────────────────────────────── const planMilestoneExecute = async ( _toolCallId: string, @@ -840,7 +803,6 @@ export function registerDbTools(pi: ExtensionAPI): void { "Use sf_plan_milestone for milestone planning instead of writing ROADMAP.md directly.", "Keep parameters flat and provide the full milestone planning payload. 
Use either explicit slices or templateId-based scaffolding for common feat/fix/refactor patterns.", "The tool validates input, writes milestone and slice planning data transactionally, renders ROADMAP.md from DB, and clears both state and parse caches after success.", - "Use the canonical name sf_plan_milestone; sf_milestone_plan is only an alias.", ], parameters: Type.Object({ // ── Core identification + content (required) ────────────────────── @@ -1070,14 +1032,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(planMilestoneTool); - registerAlias( - pi, - planMilestoneTool, - "sf_milestone_plan", - "sf_plan_milestone", - ); - // ─── sf_plan_slice (sf_slice_plan alias) ───────────────────────────── + // ─── sf_plan_slice ──────────────────────────────────────────────────── const planSliceExecute = async ( _toolCallId: string, @@ -1100,7 +1056,6 @@ export function registerDbTools(pi: ExtensionAPI): void { "Use sf_plan_slice for slice planning instead of writing S##-PLAN.md or task PLAN files directly.", "Keep parameters flat and provide the full slice planning payload, including tasks.", "The tool validates input, requires an existing parent slice, writes slice/task planning data, renders PLAN.md and task plan files from DB, and clears both state and parse caches after success.", - "Use the canonical name sf_plan_slice; sf_slice_plan is only an alias.", ], parameters: Type.Object({ // ── Core identification + content (required) ────────────────────── @@ -1231,9 +1186,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(planSliceTool); - registerAlias(pi, planSliceTool, "sf_slice_plan", "sf_plan_slice"); - // ─── sf_plan_task (sf_task_plan alias) ─────────────────────────────── + // ─── sf_plan_task ───────────────────────────────────────────────────── const planTaskExecute = async ( _toolCallId: string, @@ -1309,7 +1263,6 @@ export function registerDbTools(pi: ExtensionAPI): void { "Use sf_plan_task for 
task planning instead of writing tasks/T##-PLAN.md directly.", "Keep parameters flat and provide the full task planning payload.", "The tool validates input, requires an existing parent slice, writes task planning data, renders the task PLAN file from DB, and clears both state and parse caches after success.", - "Use the canonical name sf_plan_task; sf_task_plan is only an alias.", ], parameters: Type.Object({ milestoneId: Type.String({ description: "Milestone ID (e.g. M001)" }), @@ -1336,7 +1289,6 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(planTaskTool); - registerAlias(pi, planTaskTool, "sf_task_plan", "sf_plan_task"); // ─── sf_task_complete ───────────────────────────────────────────────── @@ -1824,14 +1776,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(milestoneCompleteTool); - registerAlias( - pi, - milestoneCompleteTool, - "sf_milestone_complete", - "sf_complete_milestone", - ); - // ─── sf_validate_milestone (sf_milestone_validate alias) ───────────── + // ─── sf_validate_milestone ──────────────────────────────────────────── const milestoneValidateExecute = async ( _toolCallId: string, @@ -1898,14 +1844,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(milestoneValidateTool); - registerAlias( - pi, - milestoneValidateTool, - "sf_milestone_validate", - "sf_validate_milestone", - ); - // ─── sf_replan_slice (sf_slice_replan alias) ───────────────────────── + // ─── sf_replan_slice ────────────────────────────────────────────────── const replanSliceExecute = async ( _toolCallId: string, @@ -1927,7 +1867,7 @@ export function registerDbTools(pi: ExtensionAPI): void { promptSnippet: "Replan a SF slice with structural enforcement of completed tasks", promptGuidelines: [ - "Use sf_replan_slice (canonical) or sf_slice_replan (alias) when a blocker is discovered and the slice plan needs rewriting.", + "Use sf_replan_slice when a blocker is discovered and the slice 
plan needs rewriting.", "The tool structurally enforces that completed tasks cannot be updated or removed — violations return specific error payloads naming the blocked task ID.", "Parameters: milestoneId, sliceId, blockerTaskId, blockerDescription, whatChanged, optional slice-level planning/ceremony updates, updatedTasks (array), removedTaskIds (array).", "updatedTasks items: taskId, title, description, estimate, files, verify, inputs, expectedOutput.", @@ -2054,9 +1994,8 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(replanSliceTool); - registerAlias(pi, replanSliceTool, "sf_slice_replan", "sf_replan_slice"); - // ─── sf_reassess_roadmap (sf_roadmap_reassess alias) ───────────────── + // ─── sf_reassess_roadmap ────────────────────────────────────────────── const reassessRoadmapExecute = async ( _toolCallId: string, @@ -2078,7 +2017,7 @@ export function registerDbTools(pi: ExtensionAPI): void { promptSnippet: "Reassess a SF roadmap with structural enforcement of completed slices", promptGuidelines: [ - "Use sf_reassess_roadmap (canonical) or sf_roadmap_reassess (alias) after a slice completes to reassess the roadmap.", + "Use sf_reassess_roadmap after a slice completes to reassess the roadmap.", "The tool structurally enforces that completed slices cannot be modified or removed — violations return specific error payloads naming the blocked slice ID.", "Parameters: milestoneId, completedSliceId, verdict, assessment, sliceChanges (object with modified, added, removed arrays).", "sliceChanges.modified items: sliceId, title, risk (optional), depends (optional), demo (optional).", @@ -2138,12 +2077,6 @@ export function registerDbTools(pi: ExtensionAPI): void { }; pi.registerTool(reassessRoadmapTool); - registerAlias( - pi, - reassessRoadmapTool, - "sf_roadmap_reassess", - "sf_reassess_roadmap", - ); // ─── sf_save_gate_result ────────────────────────────────────────────── diff --git a/src/resources/extensions/sf/constants.ts 
b/src/resources/extensions/sf/constants.ts index 14f6ffa77..62bb51644 100644 --- a/src/resources/extensions/sf/constants.ts +++ b/src/resources/extensions/sf/constants.ts @@ -36,30 +36,22 @@ export const CACHE_MAX = 50; * * Included tools and why: * - sf_summary_save: writes CONTEXT.md artifacts (all discuss prompts) - * - sf_save_summary: alias for above * - sf_decision_save: records decisions (discuss.md output phase) - * - sf_save_decision: alias for above * - sf_plan_milestone: writes roadmap (discuss.md single/multi milestone) - * - sf_milestone_plan: alias for above * - sf_milestone_generate_id: generates milestone IDs (discuss.md multi-milestone) * - sf_requirement_update: updates requirements during discuss - * - sf_update_requirement: alias for above */ export const DISCUSS_TOOLS_ALLOWLIST: readonly string[] = [ // Context / summary writing "sf_summary_save", - "sf_save_summary", // Decision recording "sf_decision_save", - "sf_save_decision", // Milestone planning (needed for discuss.md output phase) "sf_plan_milestone", - "sf_milestone_plan", // Milestone ID generation (multi-milestone flow) "sf_milestone_generate_id", // Requirement updates "sf_requirement_update", - "sf_update_requirement", ]; /** @@ -76,7 +68,6 @@ export const DISCUSS_TOOLS_ALLOWLIST: readonly string[] = [ */ export const RESEARCH_TOOLS_ALLOWLIST: readonly string[] = [ "sf_summary_save", - "sf_save_summary", "sf_self_report", ]; diff --git a/src/resources/extensions/sf/tests/discuss-tool-scoping.test.ts b/src/resources/extensions/sf/tests/discuss-tool-scoping.test.ts index f598320b9..36893643f 100644 --- a/src/resources/extensions/sf/tests/discuss-tool-scoping.test.ts +++ b/src/resources/extensions/sf/tests/discuss-tool-scoping.test.ts @@ -35,19 +35,13 @@ const guidedFlowPath = join(__dirname, "..", "guided-flow.ts"); /** Tools that are only needed during planning, execution, or completion phases */ const HEAVY_TOOLS = [ "sf_plan_slice", - "sf_slice_plan", "sf_plan_task", - 
"sf_task_plan", "sf_task_complete", "sf_slice_complete", "sf_complete_milestone", - "sf_milestone_complete", "sf_validate_milestone", - "sf_milestone_validate", "sf_replan_slice", - "sf_slice_replan", "sf_reassess_roadmap", - "sf_roadmap_reassess", "sf_save_gate_result", ]; @@ -90,10 +84,10 @@ describe("discuss tool scoping (#2949)", () => { test("DISCUSS_TOOLS_ALLOWLIST is significantly smaller than full tool set", () => { // Full set is 27 DB tools + dynamic + journal = 33+ - // Discuss set should be roughly 10 SF tools (5 canonical + 5 aliases) + // Discuss set should be the small canonical subset referenced by discuss prompts. assert.ok( - DISCUSS_TOOLS_ALLOWLIST.length <= 12, - `allowlist should have at most 12 SF tools, got ${DISCUSS_TOOLS_ALLOWLIST.length}`, + DISCUSS_TOOLS_ALLOWLIST.length <= 6, + `allowlist should have at most 6 SF tools, got ${DISCUSS_TOOLS_ALLOWLIST.length}`, ); }); @@ -137,7 +131,6 @@ describe("research tool scoping", () => { test("RESEARCH_TOOLS_ALLOWLIST permits only summary save and self-report SF tools", () => { assert.deepEqual(RESEARCH_TOOLS_ALLOWLIST, [ "sf_summary_save", - "sf_save_summary", "sf_self_report", ]); for (const planningTool of [ diff --git a/src/resources/extensions/sf/tests/tool-naming.test.ts b/src/resources/extensions/sf/tests/tool-naming.test.ts index 02ca160aa..603419ba9 100644 --- a/src/resources/extensions/sf/tests/tool-naming.test.ts +++ b/src/resources/extensions/sf/tests/tool-naming.test.ts @@ -1,15 +1,13 @@ -// tool-naming — Verifies canonical + supported alias tool registration for SF DB tools. +// tool-naming — Verifies canonical-only tool registration for SF DB tools. // -// DB tools with supported aliases must register under both names. -// Completion tools are canonical-only; do not reintroduce legacy aliases. +// SF workflow tools have one public name per operation. Do not reintroduce +// compatibility aliases; they make prompts, MCP, and UI cards drift. 
import assert from "node:assert/strict"; +import { describe, test } from "vitest"; import { registerDbTools } from "../bootstrap/db-tools.ts"; // ─── Mock PI ────────────────────────────────────────────────────────────────── -import { test } from "vitest"; - -test("tool naming.test", () => { function makeMockPi() { const tools: any[] = []; @@ -19,212 +17,154 @@ function makeMockPi() { } as any; } -// ─── Rename map ─────────────────────────────────────────────────────────────── +// ─── Canonical surface ─────────────────────────────────────────────────────── -const RENAME_MAP: Array<{ canonical: string; alias: string }> = [ - { canonical: "sf_decision_save", alias: "sf_save_decision" }, - { canonical: "sf_requirement_update", alias: "sf_update_requirement" }, - { canonical: "sf_requirement_save", alias: "sf_save_requirement" }, - { canonical: "sf_summary_save", alias: "sf_save_summary" }, - { canonical: "sf_plan_milestone", alias: "sf_milestone_plan" }, - { canonical: "sf_plan_slice", alias: "sf_slice_plan" }, - { canonical: "sf_plan_task", alias: "sf_task_plan" }, - { canonical: "sf_replan_slice", alias: "sf_slice_replan" }, - { canonical: "sf_reassess_roadmap", alias: "sf_roadmap_reassess" }, - { canonical: "sf_complete_milestone", alias: "sf_milestone_complete" }, - { canonical: "sf_validate_milestone", alias: "sf_milestone_validate" }, -]; - -const EXTRA_DB_TOOLS = [ - "sf_self_report", - "sf_skip_slice", - "sf_save_gate_result", +const CANONICAL_DB_TOOLS = [ + "sf_decision_save", + "sf_requirement_update", + "sf_requirement_save", + "sf_summary_save", "sf_milestone_generate_id", + "sf_self_report", + "sf_plan_milestone", + "sf_plan_slice", + "sf_plan_task", "sf_task_complete", "sf_slice_complete", + "sf_skip_slice", + "sf_complete_milestone", + "sf_validate_milestone", + "sf_replan_slice", + "sf_reassess_roadmap", + "sf_save_gate_result", ] as const; const REMOVED_TOOL_ALIASES = [ + "sf_save_decision", + "sf_update_requirement", + "sf_save_requirement", + 
"sf_save_summary", + "sf_generate_milestone_id", + "sf_milestone_plan", + "sf_slice_plan", + "sf_task_plan", "sf_complete_task", "sf_complete_slice", - "sf_generate_milestone_id", + "sf_milestone_complete", + "sf_milestone_validate", + "sf_slice_replan", + "sf_roadmap_reassess", ] as const; -// ─── Registration count ────────────────────────────────────────────────────── +describe("SF workflow tool naming", () => { + test("registerDbTools_registers_only_the_canonical_db_tool_surface", () => { + const pi = makeMockPi(); + registerDbTools(pi); -console.log("\n── Tool naming: registration count ──"); - -const pi = makeMockPi(); -registerDbTools(pi); - -assert.deepStrictEqual( - pi.tools.length, - RENAME_MAP.length * 2 + EXTRA_DB_TOOLS.length, - "Should register all canonical tools, aliases, and non-aliased DB helpers", -); - -// ─── Both names exist for each pair ────────────────────────────────────────── - -console.log("\n── Tool naming: canonical and alias names exist ──"); - -for (const { canonical, alias } of RENAME_MAP) { - const canonicalTool = pi.tools.find((t: any) => t.name === canonical); - const aliasTool = pi.tools.find((t: any) => t.name === alias); - - assert.ok( - canonicalTool !== undefined, - `Canonical tool "${canonical}" should be registered`, - ); - assert.ok( - aliasTool !== undefined, - `Alias tool "${alias}" should be registered`, - ); -} - -for (const name of EXTRA_DB_TOOLS) { - assert.ok( - pi.tools.some((t: any) => t.name === name), - `Extra DB tool "${name}" should be registered`, - ); -} - -for (const name of REMOVED_TOOL_ALIASES) { - assert.ok( - !pi.tools.some((t: any) => t.name === name), - `Removed tool alias "${name}" should not be registered`, - ); -} - -// ─── Execute function identity ─────────────────────────────────────────────── - -console.log("\n── Tool naming: execute function identity (===) ──"); - -for (const { canonical, alias } of RENAME_MAP) { - const canonicalTool = pi.tools.find((t: any) => t.name === canonical); - const 
aliasTool = pi.tools.find((t: any) => t.name === alias); - - if (canonicalTool && aliasTool) { - assert.ok( - canonicalTool.execute === aliasTool.execute, - `"${canonical}" and "${alias}" should share the same execute function reference`, + assert.deepStrictEqual( + pi.tools.length, + CANONICAL_DB_TOOLS.length, + "Should register exactly the canonical DB tool surface", ); - } -} -// ─── Alias descriptions include "(alias for ...)" ─────────────────────────── + for (const name of CANONICAL_DB_TOOLS) { + assert.ok( + pi.tools.some((t: any) => t.name === name), + `Canonical DB tool "${name}" should be registered`, + ); + } -console.log("\n── Tool naming: alias descriptions ──"); + for (const name of REMOVED_TOOL_ALIASES) { + assert.ok( + !pi.tools.some((t: any) => t.name === name), + `Removed tool alias "${name}" should not be registered`, + ); + } + }); -for (const { canonical, alias } of RENAME_MAP) { - const aliasTool = pi.tools.find((t: any) => t.name === alias); + test("canonical_tools_do_not_advertise_alias_names", () => { + const pi = makeMockPi(); + registerDbTools(pi); - if (aliasTool) { - assert.ok( - aliasTool.description.includes(`alias for ${canonical}`), - `Alias "${alias}" description should include "alias for ${canonical}"`, + for (const canonical of CANONICAL_DB_TOOLS) { + const canonicalTool = pi.tools.find((t: any) => t.name === canonical); + + if (canonicalTool?.promptGuidelines) { + const guidelinesText = canonicalTool.promptGuidelines.join(" "); + assert.ok( + guidelinesText.includes(canonical), + `Canonical tool "${canonical}" promptGuidelines should reference its own name`, + ); + assert.ok( + !/\balias\b/i.test(guidelinesText), + `Canonical tool "${canonical}" promptGuidelines should not mention aliases`, + ); + } + } + }); + + test("custom_tool_cards_render_canonical_sf_prefixed_names", () => { + const pi = makeMockPi(); + registerDbTools(pi); + + const fakeTheme = { + bold: (text: string) => text, + fg: (_name: string, text: string) => text, 
+ }; + for (const tool of pi.tools.filter( + (t: any) => typeof t.renderCall === "function", + )) { + const callComponent = tool.renderCall({}, fakeTheme); + assert.match( + String(callComponent.text), + /^sf_[a-z_]+/, + `Custom renderer for "${tool.name}" should display a canonical-looking sf_* tool name`, + ); + } + }); + + test("sf_plan_milestone_renderer_summarizes_work", () => { + const pi = makeMockPi(); + registerDbTools(pi); + + const planMilestoneTool = pi.tools.find( + (t: any) => t.name === "sf_plan_milestone", ); - } -} + assert.equal(typeof planMilestoneTool?.renderCall, "function"); + assert.equal(typeof planMilestoneTool?.renderResult, "function"); -// ─── Canonical tools have proper promptGuidelines ──────────────────────────── - -console.log( - "\n── Tool naming: canonical promptGuidelines use canonical name ──", -); - -for (const { canonical } of RENAME_MAP) { - const canonicalTool = pi.tools.find((t: any) => t.name === canonical); - - if (canonicalTool) { - const guidelinesText = canonicalTool.promptGuidelines.join(" "); - assert.ok( - guidelinesText.includes(canonical), - `Canonical tool "${canonical}" promptGuidelines should reference its own name`, - ); - } -} - -// ─── Alias promptGuidelines direct to canonical ────────────────────────────── - -console.log( - "\n── Tool naming: alias promptGuidelines redirect to canonical ──", -); - -for (const { canonical, alias } of RENAME_MAP) { - const aliasTool = pi.tools.find((t: any) => t.name === alias); - - if (aliasTool) { - const guidelinesText = aliasTool.promptGuidelines.join(" "); - assert.ok( - guidelinesText.includes(`Alias for ${canonical}`), - `Alias "${alias}" promptGuidelines should say "Alias for ${canonical}"`, - ); - } -} - -// guard: Custom tool cards must not invent third spellings such as -// "milestone_generate_id"; render the visible call name with the sf_ prefix. 
-console.log("\n── Tool naming: custom renderers show sf_* tool names ──"); - -{ - const fakeTheme = { - bold: (text: string) => text, - fg: (_name: string, text: string) => text, - }; - for (const tool of pi.tools.filter((t: any) => typeof t.renderCall === "function")) { - const callComponent = tool.renderCall({}, fakeTheme); - assert.match( - String(callComponent.text), - /^sf_[a-z_]+/, - `Custom renderer for "${tool.name}" should display a canonical-looking sf_* tool name`, - ); - } -} - -// ─── High-signal tool rendering ────────────────────────────────────────────── - -console.log("\n── Tool naming: milestone planning renderer summarizes work ──"); - -{ - const planMilestoneTool = pi.tools.find( - (t: any) => t.name === "sf_plan_milestone", - ); - assert.equal(typeof planMilestoneTool?.renderCall, "function"); - assert.equal(typeof planMilestoneTool?.renderResult, "function"); - - const fakeTheme = { - bold: (text: string) => text, - fg: (_name: string, text: string) => text, - }; - const callComponent = planMilestoneTool.renderCall( - { - milestoneId: "M008", - title: "Workflow polish", - slices: [{ sliceId: "S01", title: "Improve tool cards" }], - }, - fakeTheme, - ); - assert.match(callComponent.text, /M008: Workflow polish/); - assert.match(callComponent.text, /1 slice/); - - const resultComponent = planMilestoneTool.renderResult( - { - details: { + const fakeTheme = { + bold: (text: string) => text, + fg: (_name: string, text: string) => text, + }; + const callComponent = planMilestoneTool.renderCall( + { milestoneId: "M008", title: "Workflow polish", - sliceCount: 1, - firstSliceId: "S01", - firstSliceTitle: "Improve tool cards", + slices: [{ sliceId: "S01", title: "Improve tool cards" }], }, - }, - {}, - fakeTheme, - ); - assert.match(resultComponent.text, /M008 planned: Workflow polish/); - assert.match(resultComponent.text, /1 slice/); - assert.match(resultComponent.text, /next S01: Improve tool cards/); -} + fakeTheme, + ); + 
assert.match(callComponent.text, /M008: Workflow polish/); + assert.match(callComponent.text, /1 slice/); + + const resultComponent = planMilestoneTool.renderResult( + { + details: { + milestoneId: "M008", + title: "Workflow polish", + sliceCount: 1, + firstSliceId: "S01", + firstSliceTitle: "Improve tool cards", + }, + }, + {}, + fakeTheme, + ); + assert.match(resultComponent.text, /M008 planned: Workflow polish/); + assert.match(resultComponent.text, /1 slice/); + assert.match(resultComponent.text, /next S01: Improve tool cards/); + }); +}); // ═══════════════════════════════════════════════════════════════════════════ - -}); diff --git a/src/resources/extensions/sf/workflow-mcp.ts b/src/resources/extensions/sf/workflow-mcp.ts index 549db2305..cc74d59a9 100644 --- a/src/resources/extensions/sf/workflow-mcp.ts +++ b/src/resources/extensions/sf/workflow-mcp.ts @@ -25,10 +25,9 @@ const MCP_WORKFLOW_TOOL_SURFACE = new Set([ "sf_decision_save", "sf_complete_milestone", "sf_journal_query", - "sf_milestone_complete", "sf_milestone_generate_id", "sf_milestone_status", - "sf_milestone_validate", + "sf_validate_milestone", "sf_plan_task", "sf_plan_milestone", "sf_plan_slice", @@ -36,18 +35,11 @@ const MCP_WORKFLOW_TOOL_SURFACE = new Set([ "sf_reassess_roadmap", "sf_requirement_save", "sf_requirement_update", - "sf_roadmap_reassess", - "sf_save_decision", "sf_save_gate_result", - "sf_save_requirement", "sf_skip_slice", - "sf_slice_replan", "sf_slice_complete", "sf_summary_save", - "sf_task_plan", "sf_task_complete", - "sf_update_requirement", - "sf_validate_milestone", ]); function parseLookupOutput(output: Buffer | string): string { diff --git a/src/resources/extensions/sf/write-intercept.ts b/src/resources/extensions/sf/write-intercept.ts index 7d7f995d0..30f107cf6 100644 --- a/src/resources/extensions/sf/write-intercept.ts +++ b/src/resources/extensions/sf/write-intercept.ts @@ -98,7 +98,7 @@ function matchesBlockedPattern(path: string): boolean { export const 
BLOCKED_WRITE_ERROR = `Direct writes to .sf/STATE.md and .sf/sf.db are blocked. Use engine tool calls instead: - To complete a task: call sf_task_complete(milestone_id, slice_id, task_id, summary) - To complete a slice: call sf_slice_complete(milestone_id, slice_id, summary, uat_result) -- To save a decision: call sf_save_decision(scope, decision, choice, rationale) +- To save a decision: call sf_decision_save(scope, decision, choice, rationale) - To start a task: call sf_start_task(milestone_id, slice_id, task_id) - To record verification: call sf_record_verification(milestone_id, slice_id, task_id, evidence) - To report a blocker: call sf_report_blocker(milestone_id, slice_id, task_id, description)`;