fix(ci): harden graph fallback and update regression guards
This commit is contained in:
parent
249bf11196
commit
f9926996e5
7 changed files with 175 additions and 28 deletions
|
|
@ -7,13 +7,144 @@
|
|||
*/
|
||||
|
||||
import { logWarning } from "./workflow-logger.js";
|
||||
import type { GraphQueryResult, GraphStatusResult } from "@gsd-build/mcp-server";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
/**
 * A single node in the locally persisted knowledge-graph file.
 * `id`, `label` and `description` are the fields matched against the search
 * term by the fallback query implementation below.
 */
interface GraphNode {
  id: string;
  label: string;
  type: string;
  confidence: string;
  description?: string;
}
|
||||
|
||||
/** A directed, typed relation between two graph nodes, referenced by node id. */
interface GraphEdge {
  from: string;
  to: string;
  type: string;
}
|
||||
|
||||
/**
 * Shape of a graph query result: the selected nodes plus the edges whose
 * endpoints both survived selection.
 *
 * NOTE(review): this local declaration shadows the `GraphQueryResult` type
 * imported from "@gsd-build/mcp-server" above — confirm that import was
 * removed in this change, otherwise the names collide.
 */
interface GraphQueryResult {
  nodes: GraphNode[];
  edges: GraphEdge[];
}
|
||||
|
||||
/** Result of a graph existence/freshness check (see fallbackGraphStatus). */
interface GraphStatusResult {
  exists: boolean;
  stale: boolean;
  // Age in hours; only set when the graph file carries a parseable builtAt.
  ageHours?: number;
}
|
||||
|
||||
/**
 * The pair of graph operations this module needs — satisfied either by the
 * dynamically imported "@gsd-build/mcp-server" exports or by the local
 * file-based fallback implementations below.
 */
interface GraphApi {
  graphQuery: (projectDir: string, term: string, budget?: number) => Promise<GraphQueryResult>;
  graphStatus: (projectDir: string) => Promise<GraphStatusResult>;
}
|
||||
|
||||
/** On-disk shape of `.gsd/graphs/graph.json` as read by readGraphFile(). */
interface GraphFileShape {
  nodes: GraphNode[];
  edges: GraphEdge[];
  // ISO-ish timestamp of the last build; parsed with Date.parse for staleness.
  builtAt?: string;
}
|
||||
|
||||
// Memoized graph API, populated once per process by resolveGraphApi().
let cachedGraphApi: GraphApi | null = null;
// True once resolution has been attempted, even if it fell back locally —
// prevents re-importing "@gsd-build/mcp-server" on every call.
let resolvedGraphApi = false;
|
||||
|
||||
/** Options controlling how much of the graph is inlined into the prompt. */
export interface GraphSubgraphOptions {
  /** Budget in tokens passed to graphQuery (1 node ≈ 20 tokens, 1 edge ≈ 10 tokens) */
  budget: number;
}
|
||||
|
||||
function readGraphFile(projectDir: string): GraphFileShape | null {
|
||||
try {
|
||||
const graphPath = join(projectDir, ".gsd", "graphs", "graph.json");
|
||||
const raw = readFileSync(graphPath, "utf-8");
|
||||
const parsed = JSON.parse(raw) as Partial<GraphFileShape>;
|
||||
const nodes = Array.isArray(parsed.nodes) ? parsed.nodes : [];
|
||||
const edges = Array.isArray(parsed.edges) ? parsed.edges : [];
|
||||
return { nodes, edges, builtAt: typeof parsed.builtAt === "string" ? parsed.builtAt : undefined };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function fallbackGraphQuery(projectDir: string, term: string, budget = 3000): Promise<GraphQueryResult> {
|
||||
const graph = readGraphFile(projectDir);
|
||||
if (!graph) return { nodes: [], edges: [] };
|
||||
|
||||
const needle = term.trim().toLowerCase();
|
||||
const matches = graph.nodes.filter((node) => {
|
||||
const hay = [node.id, node.label, node.description].filter(Boolean).join(" ").toLowerCase();
|
||||
return hay.includes(needle);
|
||||
});
|
||||
|
||||
const maxNodes = Math.max(1, Math.floor(Math.max(1, budget) / 20));
|
||||
const selectedIds = new Set(matches.slice(0, maxNodes).map((node) => node.id));
|
||||
const nodeById = new Map(graph.nodes.map((node) => [node.id, node] as const));
|
||||
|
||||
// Pull one-hop neighbors so relation context survives even when the term
|
||||
// matches only one side of an edge.
|
||||
for (const edge of graph.edges) {
|
||||
if (selectedIds.size >= maxNodes) break;
|
||||
const touchesSelection = selectedIds.has(edge.from) || selectedIds.has(edge.to);
|
||||
if (!touchesSelection) continue;
|
||||
if (selectedIds.has(edge.from) && !selectedIds.has(edge.to) && nodeById.has(edge.to)) {
|
||||
selectedIds.add(edge.to);
|
||||
} else if (selectedIds.has(edge.to) && !selectedIds.has(edge.from) && nodeById.has(edge.from)) {
|
||||
selectedIds.add(edge.from);
|
||||
}
|
||||
}
|
||||
|
||||
const nodes = graph.nodes.filter((node) => selectedIds.has(node.id));
|
||||
|
||||
const remainingBudget = Math.max(0, budget - nodes.length * 20);
|
||||
const maxEdges = Math.floor(remainingBudget / 10);
|
||||
const edges = graph.edges
|
||||
.filter((edge) => selectedIds.has(edge.from) && selectedIds.has(edge.to))
|
||||
.slice(0, maxEdges);
|
||||
|
||||
return { nodes, edges };
|
||||
}
|
||||
|
||||
async function fallbackGraphStatus(projectDir: string): Promise<GraphStatusResult> {
|
||||
const graph = readGraphFile(projectDir);
|
||||
if (!graph) return { exists: false, stale: false };
|
||||
if (!graph.builtAt) return { exists: true, stale: false };
|
||||
|
||||
const builtAtMs = Date.parse(graph.builtAt);
|
||||
if (!Number.isFinite(builtAtMs)) return { exists: true, stale: false };
|
||||
|
||||
const ageHours = (Date.now() - builtAtMs) / (1000 * 60 * 60);
|
||||
return { exists: true, stale: ageHours > 24, ageHours };
|
||||
}
|
||||
|
||||
function isGraphApi(mod: unknown): mod is GraphApi {
|
||||
if (!mod || typeof mod !== "object") return false;
|
||||
const candidate = mod as Record<string, unknown>;
|
||||
return typeof candidate.graphQuery === "function" && typeof candidate.graphStatus === "function";
|
||||
}
|
||||
|
||||
async function resolveGraphApi(): Promise<GraphApi> {
|
||||
if (resolvedGraphApi && cachedGraphApi) return cachedGraphApi;
|
||||
|
||||
resolvedGraphApi = true;
|
||||
try {
|
||||
const imported = await import("@gsd-build/mcp-server");
|
||||
if (isGraphApi(imported)) {
|
||||
cachedGraphApi = imported;
|
||||
return cachedGraphApi;
|
||||
}
|
||||
logWarning("prompt", "@gsd-build/mcp-server graph exports unavailable; using local graph fallback");
|
||||
} catch {
|
||||
// Fall back to local reader implementation.
|
||||
}
|
||||
|
||||
cachedGraphApi = {
|
||||
graphQuery: fallbackGraphQuery,
|
||||
graphStatus: fallbackGraphStatus,
|
||||
};
|
||||
return cachedGraphApi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Query the knowledge graph for nodes related to the given term and format
|
||||
* the result as an inlined context block.
|
||||
|
|
@ -33,18 +164,14 @@ export async function inlineGraphSubgraph(
|
|||
if (!term || !term.trim()) return null;
|
||||
|
||||
try {
|
||||
const { graphQuery, graphStatus } = await import("@gsd-build/mcp-server") as {
|
||||
graphQuery: (projectDir: string, term: string, budget?: number) => Promise<GraphQueryResult>;
|
||||
graphStatus: (projectDir: string) => Promise<GraphStatusResult>;
|
||||
};
|
||||
|
||||
const result = await graphQuery(projectDir, term, opts.budget);
|
||||
const graphApi = await resolveGraphApi();
|
||||
const result = await graphApi.graphQuery(projectDir, term, opts.budget);
|
||||
if (result.nodes.length === 0) return null;
|
||||
|
||||
// Check staleness for annotation
|
||||
let staleAnnotation = "";
|
||||
try {
|
||||
const status = await graphStatus(projectDir);
|
||||
const status = await graphApi.graphStatus(projectDir);
|
||||
if (status.exists && status.stale && status.ageHours !== undefined) {
|
||||
const hours = Math.round(status.ageHours);
|
||||
staleAnnotation = `\n> ⚠ Graph last built ${hours}h ago — context may be outdated`;
|
||||
|
|
@ -54,14 +181,14 @@ export async function inlineGraphSubgraph(
|
|||
}
|
||||
|
||||
// Format nodes as a compact list
|
||||
const nodeLines = result.nodes.map((n) => {
|
||||
const desc = n.description ? ` — ${n.description}` : "";
|
||||
return `- **${n.label}** (\`${n.type}\`, ${n.confidence})${desc}`;
|
||||
const nodeLines = result.nodes.map((node) => {
|
||||
const desc = node.description ? ` — ${node.description}` : "";
|
||||
return `- **${node.label}** (\`${node.type}\`, ${node.confidence})${desc}`;
|
||||
});
|
||||
|
||||
// Format edges as relations (only if present)
|
||||
const edgeLines = result.edges.length > 0
|
||||
? result.edges.map((e) => `- \`${e.from}\` →[${e.type}]→ \`${e.to}\``)
|
||||
? result.edges.map((edge) => `- \`${edge.from}\` →[${edge.type}]→ \`${edge.to}\``)
|
||||
: [];
|
||||
|
||||
const sections: string[] = [
|
||||
|
|
|
|||
|
|
@ -13,11 +13,15 @@ test("auto-mode captures GSD_PROJECT_ROOT before entering the dispatch loop", ()
|
|||
const resumeCallIdx = source.indexOf("captureProjectRootEnv(s.originalBasePath || s.basePath);");
|
||||
assert.ok(resumeCallIdx > -1, "auto.ts should capture GSD_PROJECT_ROOT before resume autoLoop");
|
||||
|
||||
const firstAutoLoopIdx = source.indexOf("await autoLoop(ctx, pi, s, buildLoopDeps());");
|
||||
assert.ok(firstAutoLoopIdx > -1, "auto.ts should invoke autoLoop()");
|
||||
const firstLoopIdxCandidates = [
|
||||
source.indexOf("await runAutoLoopWithUok({"),
|
||||
source.indexOf("await autoLoop(ctx, pi, s, buildLoopDeps());"),
|
||||
].filter((idx) => idx > -1);
|
||||
const firstAutoLoopIdx = firstLoopIdxCandidates.length > 0 ? Math.min(...firstLoopIdxCandidates) : -1;
|
||||
assert.ok(firstAutoLoopIdx > -1, "auto.ts should invoke the auto dispatch loop");
|
||||
assert.ok(
|
||||
resumeCallIdx < firstAutoLoopIdx,
|
||||
"auto.ts must set GSD_PROJECT_ROOT before the first autoLoop() call",
|
||||
"auto.ts must set GSD_PROJECT_ROOT before the first loop call",
|
||||
);
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -25,8 +25,12 @@ console.log("\n=== resume path refreshes resources and opens DB before rebuildSt
|
|||
const resumeSectionStart = autoSrc.indexOf("if (s.paused) {", autoSrc.indexOf("// If resuming from paused state"));
|
||||
assertTrue(resumeSectionStart > 0, "auto.ts has the paused-session resume block");
|
||||
|
||||
const resumeSectionEnd = autoSrc.indexOf("await autoLoop(", resumeSectionStart);
|
||||
assertTrue(resumeSectionEnd > resumeSectionStart, "resume block reaches autoLoop");
|
||||
const resumeSectionEndCandidates = [
|
||||
autoSrc.indexOf("await runAutoLoopWithUok(", resumeSectionStart),
|
||||
autoSrc.indexOf("await autoLoop(", resumeSectionStart),
|
||||
].filter((idx) => idx > resumeSectionStart);
|
||||
const resumeSectionEnd = resumeSectionEndCandidates.length > 0 ? Math.min(...resumeSectionEndCandidates) : -1;
|
||||
assertTrue(resumeSectionEnd > resumeSectionStart, "resume block reaches the dispatch loop");
|
||||
|
||||
const resumeSection = autoSrc.slice(resumeSectionStart, resumeSectionEnd);
|
||||
|
||||
|
|
|
|||
|
|
@ -26,6 +26,14 @@ import { MAX_FINALIZE_TIMEOUTS } from "../auto/types.ts";
|
|||
|
||||
const { assertTrue, assertEq, report } = createTestContext();
|
||||
|
||||
function getRunFinalizeBody(phasesSource: string): string {
|
||||
const fnIdx = phasesSource.indexOf("export async function runFinalize(");
|
||||
assertTrue(fnIdx > 0, "runFinalize function should exist in phases.ts");
|
||||
|
||||
const nextExportIdx = phasesSource.indexOf("\nexport ", fnIdx + 1);
|
||||
return phasesSource.slice(fnIdx, nextExportIdx > fnIdx ? nextExportIdx : undefined);
|
||||
}
|
||||
|
||||
// ═══ Test: withTimeout resolves when inner promise resolves promptly ══════════
|
||||
|
||||
{
|
||||
|
|
@ -145,11 +153,7 @@ const { assertTrue, assertEq, report } = createTestContext();
|
|||
"utf-8",
|
||||
);
|
||||
|
||||
// Find the runFinalize function body
|
||||
const fnIdx = phasesSource.indexOf("export async function runFinalize(");
|
||||
assertTrue(fnIdx > 0, "runFinalize function should exist in phases.ts");
|
||||
|
||||
const fnBody = phasesSource.slice(fnIdx, fnIdx + 8000);
|
||||
const fnBody = getRunFinalizeBody(phasesSource);
|
||||
|
||||
// postUnitPreVerification must be wrapped in withTimeout
|
||||
const preTimeoutIdx = fnBody.indexOf("withTimeout(");
|
||||
|
|
@ -207,8 +211,7 @@ const { assertTrue, assertEq, report } = createTestContext();
|
|||
"utf-8",
|
||||
);
|
||||
|
||||
const fnIdx = phasesSource.indexOf("export async function runFinalize(");
|
||||
const fnBody = phasesSource.slice(fnIdx, fnIdx + 8000);
|
||||
const fnBody = getRunFinalizeBody(phasesSource);
|
||||
|
||||
// Both timeout handlers should increment consecutiveFinalizeTimeouts
|
||||
const incrementCount = (fnBody.match(/consecutiveFinalizeTimeouts\+\+/g) || []).length;
|
||||
|
|
|
|||
|
|
@ -21,7 +21,8 @@ test("postUnitPreVerification rebuilds STATE.md before worktree sync", () => {
|
|||
const fnStart = source.indexOf("export async function postUnitPreVerification");
|
||||
assert.ok(fnStart > 0, "postUnitPreVerification should exist");
|
||||
|
||||
const section = source.slice(fnStart, fnStart + 8000);
|
||||
const fnEnd = source.indexOf("export async function postUnitPostVerification", fnStart);
|
||||
const section = source.slice(fnStart, fnEnd > fnStart ? fnEnd : undefined);
|
||||
const rebuildIdx = section.indexOf('await runSafely("postUnit", "state-rebuild"');
|
||||
const syncIdx = section.indexOf('await runSafely("postUnit", "worktree-sync"');
|
||||
|
||||
|
|
|
|||
|
|
@ -430,11 +430,18 @@ export async function handleCompleteSlice(
|
|||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
(async () => {
|
||||
try {
|
||||
const graphMod = await import("@gsd-build/mcp-server") as {
|
||||
const graphMod = await import("@gsd-build/mcp-server") as unknown as Partial<{
|
||||
buildGraph: (dir: string) => Promise<{ nodes: unknown[]; edges: unknown[]; builtAt: string }>;
|
||||
writeGraph: (gsdRoot: string, graph: unknown) => Promise<void>;
|
||||
resolveGsdRoot: (basePath: string) => string;
|
||||
};
|
||||
}>;
|
||||
if (
|
||||
typeof graphMod.buildGraph !== "function"
|
||||
|| typeof graphMod.writeGraph !== "function"
|
||||
|| typeof graphMod.resolveGsdRoot !== "function"
|
||||
) {
|
||||
throw new Error("graph helpers unavailable from @gsd-build/mcp-server");
|
||||
}
|
||||
const g = await graphMod.buildGraph(basePath);
|
||||
await graphMod.writeGraph(graphMod.resolveGsdRoot(basePath), g);
|
||||
} catch (graphErr) {
|
||||
|
|
|
|||
|
|
@ -294,8 +294,9 @@ function _push(
|
|||
},
|
||||
}),
|
||||
);
|
||||
} catch {
|
||||
} catch (auditEmitErr) {
|
||||
// Best-effort: unified audit projection must never block workflow logger.
|
||||
_writeStderr(`[gsd:workflow-logger] unified-audit emit failed: ${(auditEmitErr as Error).message}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue