feat: Add backlog JSONL writer to appendBacklogItems() with BacklogEntry interface
- src/resources/extensions/sf/commands-todo.ts SF-Task: S02/T01
This commit is contained in:
parent
2111da8e60
commit
30586f36f8
2 changed files with 52 additions and 123 deletions
|
|
@ -71,6 +71,16 @@ interface NormalizedTriageItem {
|
|||
created_at: string;
|
||||
}
|
||||
|
||||
/**
 * A single backlog item produced by the TODO-triage pipeline, serialized as
 * one JSON line into `.sf/triage/backlog/<triageRunId>.jsonl` (see
 * renderBacklogJsonl / appendBacklogItems).
 */
export interface BacklogEntry {
  /** Backlog identifier, e.g. "999.12" (allocated by nextBacklogId). */
  id: string;
  /** Human-readable task title (surrounding quotes stripped by the writer). */
  title: string;
  /** Provenance tag — entries from this writer are always "todo-triage". */
  source: "todo-triage";
  /** Entry kind — this writer only emits implementation tasks. */
  kind: "implementation_task";
  /** Optional supporting evidence; the writer omits the key when falsy. */
  evidence?: string;
  /** ISO-8601 timestamp (new Date().toISOString()) of the triage run. */
  triaged_at: string;
  /** Freshly written entries always start as "pending". */
  status: "pending";
}
|
||||
|
||||
function timestampId(date = new Date()): string {
|
||||
const pad = (n: number) => String(n).padStart(2, "0");
|
||||
return [
|
||||
|
|
@ -238,7 +248,32 @@ function nextBacklogId(content: string): string {
|
|||
return `999.${maxNum + 1}`;
|
||||
}
|
||||
|
||||
function appendBacklogItems(basePath: string, titles: string[]): number {
|
||||
function renderBacklogJsonl(
|
||||
items: Array<{ id: string; title: string; evidence?: string }>,
|
||||
triagedAt: string,
|
||||
): string {
|
||||
return (
|
||||
items
|
||||
.map((item) =>
|
||||
JSON.stringify({
|
||||
id: item.id,
|
||||
title: item.title,
|
||||
source: "todo-triage",
|
||||
kind: "implementation_task",
|
||||
...(item.evidence ? { evidence: item.evidence } : {}),
|
||||
triaged_at: triagedAt,
|
||||
status: "pending",
|
||||
}),
|
||||
)
|
||||
.join("\n") + (items.length > 0 ? "\n" : "")
|
||||
);
|
||||
}
|
||||
|
||||
function appendBacklogItems(
|
||||
basePath: string,
|
||||
titles: string[],
|
||||
triageRunId: string,
|
||||
): number {
|
||||
const cleanTitles = titles.map((title) => title.trim()).filter(Boolean);
|
||||
if (cleanTitles.length === 0) return 0;
|
||||
|
||||
|
|
@ -250,11 +285,26 @@ function appendBacklogItems(basePath: string, titles: string[]): number {
|
|||
if (!content.endsWith("\n")) content += "\n";
|
||||
|
||||
const date = new Date().toISOString().slice(0, 10);
|
||||
const triagedAt = new Date().toISOString();
|
||||
const backlogItems: Array<{ id: string; title: string }> = [];
|
||||
for (const title of cleanTitles) {
|
||||
const id = nextBacklogId(content);
|
||||
content += `- [ ] ${id} — ${title.replace(/^['"]|['"]$/g, "")} (triaged ${date})\n`;
|
||||
backlogItems.push({ id, title: title.replace(/^['"]|['"]$/g, "") });
|
||||
}
|
||||
writeFileSync(filePath, content, "utf-8");
|
||||
|
||||
// Also write JSONL backlog entries
|
||||
const backlogDir = join(basePath, ".sf", "triage", "backlog");
|
||||
mkdirSync(backlogDir, { recursive: true });
|
||||
const jsonlPath = join(backlogDir, `${triageRunId}.jsonl`);
|
||||
const existing = existsSync(jsonlPath) ? readFileSync(jsonlPath, "utf-8") : "";
|
||||
const jsonlContent = renderBacklogJsonl(
|
||||
backlogItems.map((item) => ({ ...item, evidence: undefined })),
|
||||
triagedAt,
|
||||
);
|
||||
writeFileSync(jsonlPath, existing + jsonlContent, "utf-8");
|
||||
|
||||
return cleanTitles.length;
|
||||
}
|
||||
|
||||
|
|
@ -437,7 +487,7 @@ export async function triageTodoDump(
|
|||
|
||||
const backlogItemsAdded =
|
||||
options.backlog === true
|
||||
? appendBacklogItems(basePath, result.implementation_tasks)
|
||||
? appendBacklogItems(basePath, result.implementation_tasks, id)
|
||||
: 0;
|
||||
|
||||
if (options.clear !== false) {
|
||||
|
|
|
|||
|
|
@ -2534,125 +2534,4 @@ export default function (pi: ExtensionAPI) {
|
|||
};
|
||||
},
|
||||
});
|
||||
|
||||
// ── call_scout: sift-powered autonomous exploration ─────────────────────────
|
||||
|
||||
// Wraps `sift search --agent` for Planner → Scout → Worker pipeline.
|
||||
// The Scout subagent is a thin wrapper around sift's autonomous corpus exploration.
|
||||
// Planner calls call_scout with a query; sift explores and returns snippet-bearing evidence.
|
||||
// Input schema for the call_scout tool (consumed by pi.registerTool below).
// Only `query` is required; execute() supplies the defaults for the rest.
const CallScoutParams = Type.Object({
  query: Type.String({
    description:
      "Natural-language query describing what to explore (e.g. 'find where the write gate tool_call hooks are registered')",
  }),
  // Optional search root; execute() falls back to process.cwd().
  scope: Type.Optional(
    Type.String({
      description:
        "Path to search within. Defaults to the current working directory. Use the active worktree for isolation.",
    }),
  ),
  // Optional sift strategy; execute() falls back to "path-hybrid".
  strategy: Type.Optional(
    Type.String({
      description:
        "Search strategy: 'path-hybrid' (default), 'page-index-hybrid', 'bm25', or 'path'",
    }),
  ),
});
|
||||
// Registers the call_scout tool: a read-only wrapper that shells out to
// `sift search --agent <query> <scope>` and returns its stdout as evidence.
pi.registerTool({
  name: "call_scout",
  label: "Scout",
  description: [
    "Explore the codebase using sift's autonomous agent loop.",
    " Spawns sift search --agent with the given query, returns snippet-bearing evidence.",
    " Use this instead of grep/read when you need to understand the architecture",
    " of an unfamiliar subsystem — sift's autonomous loop expands queries and",
    " finds relevant code without you needing to know file paths ahead of time.",
    " Planner calls this before writing an execution plan.",
  ].join(""),
  promptGuidelines: [
    "call_scout is for exploration only — it does not write or modify files.",
    " Be specific in your query: name functions, files, or concepts you expect to find.",
    " Use the scope param to restrict search to a specific worktree or subsystem.",
    " Review the returned evidence before planning — it may reveal things you missed.",
  ],
  parameters: CallScoutParams,
  async execute(_toolCallId, params, signal) {
    // Apply defaults declared in the CallScoutParams descriptions.
    const scope = params.scope ?? process.cwd();
    const strategy = params.strategy ?? "path-hybrid";
    const query = params.query;
    // Resolve sift binary — check PATH first, then fall back to ~/.cargo/bin
    const siftBin = (() => {
      const pathEnv = process.env.PATH ?? "";
      for (const dir of pathEnv.split(path.delimiter)) {
        const candidate = path.join(dir, "sift");
        try {
          if (fs.existsSync(candidate)) return candidate;
        }
        catch {
          // continue
        }
      }
      // Fallback to known install location
      const homeBin = path.join(os.homedir(), ".cargo", "bin", "sift");
      // Last resort: bare "sift" and let spawn fail with a useful error.
      return fs.existsSync(homeBin) ? homeBin : "sift";
    })();
    const args = [
      "search",
      "--strategy",
      strategy,
      "--agent",
      query,
      scope,
    ];
    const stderr: string[] = [];
    const stdout: string[] = [];
    // shell:false — args are passed verbatim, so the query needs no escaping.
    const proc = spawn(siftBin, args, {
      cwd: scope,
      shell: false,
      stdio: ["ignore", "pipe", "pipe"],
    });
    // Collect output
    proc.stdout.on("data", (chunk) => stdout.push(chunk.toString()));
    proc.stderr.on("data", (chunk) => stderr.push(chunk.toString()));
    // Handle abort signal
    // NOTE(review): if the signal is already aborted when execute() runs, the
    // listener never fires and the child is not killed — presumably callers
    // always pass a fresh signal; confirm at the call site.
    if (signal) {
      signal.addEventListener("abort", () => {
        try {
          proc.kill("SIGTERM");
        }
        catch {
          // ignore
        }
      });
    }
    // Wait for exit; spawn errors (e.g. missing binary) resolve to exit 1.
    const exitCode = await new Promise<number>((resolve) => {
      proc.on("close", (code) => resolve(code ?? 0));
      proc.on("error", () => resolve(1));
    });
    const out = stdout.join("");
    const err = stderr.join("");
    // NOTE(review): the install hint only fires when stderr mentions
    // "not found"; other non-zero exits fall through to the generic return
    // below (with possibly-empty stdout).
    if (exitCode !== 0 && err.includes("not found")) {
      const hint =
        " Is sift installed? Try: cargo install sift (or: curl -sSL https://sift.sh | sh)";
      return {
        content: [
          {
            type: "text",
            text: `call_scout failed (exit ${exitCode}). Is sift installed?${hint}`,
          },
        ],
        details: { operation: "call_scout", exitCode, siftBin, query, scope, strategy },
      };
    }
    // Success (or unrecognized failure): return raw sift output as the evidence.
    return {
      content: [
        {
          type: "text",
          text: out || `(no output, exit ${exitCode})`,
        },
      ],
      details: { operation: "call_scout", exitCode, siftBin, query, scope, strategy },
    };
  },
});
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue