feat(graph): implement knowledge graph system (closes #4202)
Ports the v1 graphify system to v2 as a native TypeScript implementation.
The knowledge graph builds semantic relationships between milestones, slices,
tasks, and knowledge entries — and injects relevant subgraphs automatically
into every agent dispatch prompt.
## Core implementation (packages/mcp-server/src/readers/graph.ts)
- `buildGraph(projectDir)` — walks all .gsd/ artifacts (STATE.md,
milestone PLANs, slice PLANs, KNOWLEDGE.md), extracts nodes and edges
with confidence tiers (EXTRACTED / INFERRED / AMBIGUOUS). Parse errors
skip the node rather than crashing.
- `writeGraph(gsdRoot, graph)` — atomic write via tmp file + rename.
- `writeSnapshot(gsdRoot)` — saves a diff baseline before each rebuild.
- `graphQuery(projectDir, term, budget?)` — BFS subgraph search with
case-insensitive matching on label + description; trims AMBIGUOUS edges
first, then INFERRED, respecting the token budget (default 4 000).
- `graphStatus(projectDir)` — freshness check; stale = older than 24 h.
- `graphDiff(projectDir)` — compares current graph to last snapshot,
returns added / removed / changed counts for nodes and edges.
## MCP tool (packages/mcp-server/src/server.ts)
Registers `gsd_graph` immediately after `gsd_knowledge` with four modes:
build | query | status | diff. All errors returned as isError: true.
## CLI subcommand (src/cli.ts, src/help-text.ts)
`gsd graph build|status|query <term>|diff` — follows the established
`if (cliFlags.messages[0] === '...')` dispatch pattern. Uses
`resolveGsdRoot()` for git-root-aware path resolution (not a naive
`.gsd` append). Help text updated with correct positional argument format.
## Auto-rebuild after slice completion
(src/resources/extensions/gsd/tools/complete-slice.ts)
Fire-and-forget `buildGraph → writeGraph` triggered after every slice
completion. Uses `@gsd-build/mcp-server` package import (not a relative
src path) and `resolveGsdRoot()` for correct path resolution in monorepos.
## Graph-aware dispatch injection
(src/resources/extensions/gsd/graph-context.ts,
src/resources/extensions/gsd/auto-prompts.ts)
`inlineGraphSubgraph(projectDir, term, { budget })` queries the graph and
formats the result as a `### Knowledge Graph Context` markdown block,
consistent with all other inlined context blocks. Adds a stale warning
annotation when the graph is older than 24 h. Returns null (graceful
skip) when graph.json is missing, the query returns zero nodes, or the
import fails — no agent dispatch is ever blocked by graph availability.
Injected into three prompt builders:
- `buildResearchSlicePrompt` — 3 000 token budget
- `buildPlanSlicePrompt` — 3 000 token budget
- `buildExecuteTaskPrompt` — 2 000 token budget
## Tests
- 22 tests for the core graph reader (graph.test.ts)
- 14 tests for the dispatch injection helper (graph-context.test.ts)
- All tests use real on-disk fixtures (no module mocking needed)
- Full suite: 6 318 passed, 0 failed
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
f4f365a27a
commit
15bccca78f
11 changed files with 1800 additions and 0 deletions
|
|
@ -13,6 +13,9 @@ export type {
|
|||
} from './types.js';
|
||||
export { MAX_EVENTS, INIT_TIMEOUT_MS } from './types.js';
|
||||
|
||||
// Path resolution utilities
|
||||
export { resolveGsdRoot } from './readers/paths.js';
|
||||
|
||||
// Read-only state readers (usable without a running session)
|
||||
export { readProgress } from './readers/state.js';
|
||||
export type { ProgressResult } from './readers/state.js';
|
||||
|
|
@ -26,3 +29,15 @@ export { readKnowledge } from './readers/knowledge.js';
|
|||
export type { KnowledgeResult, KnowledgeEntry } from './readers/knowledge.js';
|
||||
export { runDoctorLite } from './readers/doctor-lite.js';
|
||||
export type { DoctorResult, DoctorIssue } from './readers/doctor-lite.js';
|
||||
export { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './readers/graph.js';
|
||||
export type {
|
||||
NodeType,
|
||||
EdgeType,
|
||||
ConfidenceTier,
|
||||
GraphNode,
|
||||
GraphEdge,
|
||||
KnowledgeGraph,
|
||||
GraphStatusResult,
|
||||
GraphQueryResult,
|
||||
GraphDiffResult,
|
||||
} from './readers/graph.js';
|
||||
|
|
|
|||
426
packages/mcp-server/src/readers/graph.test.ts
Normal file
426
packages/mcp-server/src/readers/graph.test.ts
Normal file
|
|
@ -0,0 +1,426 @@
|
|||
// GSD MCP Server — knowledge graph reader tests
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
import { describe, it, before, after, beforeEach, afterEach } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdirSync, writeFileSync, rmSync, existsSync, readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
|
||||
import {
|
||||
buildGraph,
|
||||
writeGraph,
|
||||
writeSnapshot,
|
||||
graphStatus,
|
||||
graphQuery,
|
||||
graphDiff,
|
||||
} from './graph.js';
|
||||
import type { KnowledgeGraph } from './graph.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Fixture helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function tmpProject(): string {
|
||||
const dir = join(tmpdir(), `gsd-graph-test-${randomBytes(4).toString('hex')}`);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
return dir;
|
||||
}
|
||||
|
||||
function writeFixture(base: string, relPath: string, content: string): void {
|
||||
const full = join(base, relPath);
|
||||
mkdirSync(join(full, '..'), { recursive: true });
|
||||
writeFileSync(full, content, 'utf-8');
|
||||
}
|
||||
|
||||
/**
 * Populate `projectDir` with a minimal but realistic set of .gsd/ artifacts:
 * STATE.md, KNOWLEDGE.md, one milestone ROADMAP, and one slice PLAN.
 * This is the shared fixture used by most suites in this file.
 */
function makeProjectWithArtifacts(projectDir: string): void {
  // STATE.md — active milestone/slice/phase plus the milestone registry.
  writeFixture(projectDir, '.gsd/STATE.md', [
    '# GSD State',
    '',
    '**Active Milestone:** M001: Auth System',
    '**Active Slice:** S01: Login flow',
    '**Phase:** execution',
    '',
    '## Milestone Registry',
    '',
    '- 🔄 **M001:** Auth System',
    '',
    '## Next Action',
    '',
    'Execute T01 in S01.',
  ].join('\n'));

  // KNOWLEDGE.md — at least one entry in each table (rules, patterns, lessons).
  writeFixture(projectDir, '.gsd/KNOWLEDGE.md', [
    '# Project Knowledge',
    '',
    '## Rules',
    '',
    '| # | Scope | Rule | Why | Added |',
    '|---|-------|------|-----|-------|',
    '| K001 | auth | Hash passwords with bcrypt | Security requirement | manual |',
    '| K002 | db | Use transactions for multi-table | Data consistency | auto |',
    '',
    '## Patterns',
    '',
    '| # | Pattern | Where | Notes |',
    '|---|---------|-------|-------|',
    '| P001 | Singleton services | services/ | Prevents duplication |',
    '',
    '## Lessons Learned',
    '',
    '| # | What Happened | Root Cause | Fix | Scope |',
    '|---|--------------|------------|-----|-------|',
    '| L001 | CI tests failed | Env diff | Added setup script | testing |',
  ].join('\n'));

  // Milestone roadmap with a single in-progress slice in the overview table.
  writeFixture(projectDir, '.gsd/milestones/M001/M001-ROADMAP.md', [
    '# M001: Auth System',
    '',
    '## Vision',
    '',
    'Build authentication for the platform.',
    '',
    '## Slice Overview',
    '',
    '| ID | Slice | Risk | Depends | Done | After this |',
    '|----|-------|------|---------|------|------------|',
    '| S01 | Login flow | low | — | 🔄 | Users can log in |',
  ].join('\n'));

  // Slice plan with two unchecked tasks (checkbox bullet format).
  writeFixture(projectDir, '.gsd/milestones/M001/slices/S01/S01-PLAN.md', [
    '# S01: Login flow',
    '',
    '## Tasks',
    '',
    '- [ ] **T01: Implement login endpoint** — Core auth logic',
    '- [ ] **T02: Add session management** — Keep users logged in',
  ].join('\n'));
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// buildGraph tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('buildGraph', () => {
|
||||
let projectDir: string;
|
||||
|
||||
before(() => {
|
||||
projectDir = tmpProject();
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
});
|
||||
|
||||
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||
|
||||
it('returns nodeCount > 0 for a project with artifacts', async () => {
|
||||
const graph = await buildGraph(projectDir);
|
||||
assert.ok(graph.nodes.length > 0, `Expected nodes, got ${graph.nodes.length}`);
|
||||
});
|
||||
|
||||
it('returns edgeCount >= 0 (valid graph structure)', async () => {
|
||||
const graph = await buildGraph(projectDir);
|
||||
assert.ok(graph.edges.length >= 0);
|
||||
});
|
||||
|
||||
it('includes builtAt ISO timestamp', async () => {
|
||||
const graph = await buildGraph(projectDir);
|
||||
assert.ok(typeof graph.builtAt === 'string');
|
||||
assert.ok(!isNaN(Date.parse(graph.builtAt)));
|
||||
});
|
||||
|
||||
it('skips unparseable artifact and does not throw', async () => {
|
||||
const badProject = tmpProject();
|
||||
// Write a corrupt/minimal STATE.md that is technically valid but empty
|
||||
writeFixture(badProject, '.gsd/STATE.md', 'not valid gsd state at all \0\0\0');
|
||||
// Should not throw
|
||||
const graph = await buildGraph(badProject);
|
||||
assert.ok(graph.nodes.length >= 0);
|
||||
rmSync(badProject, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('returns empty graph for project with no .gsd/ directory', async () => {
|
||||
const emptyProject = tmpProject();
|
||||
const graph = await buildGraph(emptyProject);
|
||||
assert.ok(graph.nodes.length >= 0); // no throw
|
||||
assert.equal(typeof graph.builtAt, 'string');
|
||||
rmSync(emptyProject, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('nodes have required fields: id, label, type, confidence', async () => {
|
||||
const graph = await buildGraph(projectDir);
|
||||
for (const node of graph.nodes) {
|
||||
assert.ok(typeof node.id === 'string', 'node.id must be string');
|
||||
assert.ok(typeof node.label === 'string', 'node.label must be string');
|
||||
assert.ok(typeof node.type === 'string', 'node.type must be string');
|
||||
assert.ok(
|
||||
node.confidence === 'EXTRACTED' ||
|
||||
node.confidence === 'INFERRED' ||
|
||||
node.confidence === 'AMBIGUOUS',
|
||||
`Invalid confidence: ${node.confidence}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// writeGraph tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('writeGraph', () => {
|
||||
let projectDir: string;
|
||||
let graph: KnowledgeGraph;
|
||||
|
||||
before(async () => {
|
||||
projectDir = tmpProject();
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
graph = await buildGraph(projectDir);
|
||||
});
|
||||
|
||||
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||
|
||||
it('creates graph.json in .gsd/graphs/ after writeGraph()', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
await writeGraph(gsdRoot, graph);
|
||||
const graphPath = join(gsdRoot, 'graphs', 'graph.json');
|
||||
assert.ok(existsSync(graphPath), `Expected ${graphPath} to exist`);
|
||||
});
|
||||
|
||||
it('write is atomic — no temp file remains after writeGraph()', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
await writeGraph(gsdRoot, graph);
|
||||
const tmpPath = join(gsdRoot, 'graphs', 'graph.tmp.json');
|
||||
assert.ok(!existsSync(tmpPath), 'Temp file should not exist after successful write');
|
||||
});
|
||||
|
||||
it('written graph.json is valid JSON with nodes and edges', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
await writeGraph(gsdRoot, graph);
|
||||
const raw = readFileSync(join(gsdRoot, 'graphs', 'graph.json'), 'utf-8');
|
||||
const parsed = JSON.parse(raw) as KnowledgeGraph;
|
||||
assert.ok(Array.isArray(parsed.nodes));
|
||||
assert.ok(Array.isArray(parsed.edges));
|
||||
assert.ok(typeof parsed.builtAt === 'string');
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// graphStatus tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('graphStatus', () => {
|
||||
let projectDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
projectDir = tmpProject();
|
||||
});
|
||||
|
||||
afterEach(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||
|
||||
it('returns { exists: false } when no graph.json exists', async () => {
|
||||
const status = await graphStatus(projectDir);
|
||||
assert.equal(status.exists, false);
|
||||
});
|
||||
|
||||
it('returns { exists: true, nodeCount, edgeCount, ageHours } when graph exists', async () => {
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
const graph = await buildGraph(projectDir);
|
||||
await writeGraph(gsdRoot, graph);
|
||||
|
||||
const status = await graphStatus(projectDir);
|
||||
assert.equal(status.exists, true);
|
||||
assert.ok(typeof status.nodeCount === 'number');
|
||||
assert.ok(typeof status.edgeCount === 'number');
|
||||
assert.ok(typeof status.ageHours === 'number');
|
||||
assert.ok(status.ageHours >= 0);
|
||||
});
|
||||
|
||||
it('stale = false for a freshly built graph', async () => {
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
const graph = await buildGraph(projectDir);
|
||||
await writeGraph(gsdRoot, graph);
|
||||
|
||||
const status = await graphStatus(projectDir);
|
||||
assert.equal(status.stale, false);
|
||||
});
|
||||
|
||||
it('stale = true for a graph older than 24h (builtAt backdated)', async () => {
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
mkdirSync(join(gsdRoot, 'graphs'), { recursive: true });
|
||||
|
||||
// Write a graph with a builtAt 25 hours ago
|
||||
const oldGraph: KnowledgeGraph = {
|
||||
nodes: [],
|
||||
edges: [],
|
||||
builtAt: new Date(Date.now() - 25 * 60 * 60 * 1000).toISOString(),
|
||||
};
|
||||
writeFileSync(
|
||||
join(gsdRoot, 'graphs', 'graph.json'),
|
||||
JSON.stringify(oldGraph),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const status = await graphStatus(projectDir);
|
||||
assert.equal(status.exists, true);
|
||||
assert.equal(status.stale, true);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// graphQuery tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('graphQuery', () => {
|
||||
let projectDir: string;
|
||||
|
||||
before(async () => {
|
||||
projectDir = tmpProject();
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
const graph = await buildGraph(projectDir);
|
||||
await writeGraph(gsdRoot, graph);
|
||||
});
|
||||
|
||||
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||
|
||||
it('returns matching nodes for a known term', async () => {
|
||||
const result = await graphQuery(projectDir, 'auth');
|
||||
assert.ok(Array.isArray(result.nodes));
|
||||
// Should match nodes with 'auth' in label or description
|
||||
assert.ok(result.nodes.length > 0, 'Expected at least one match for "auth"');
|
||||
});
|
||||
|
||||
it('returns empty array for a term that matches nothing', async () => {
|
||||
const result = await graphQuery(projectDir, 'xxxxxxnotfound999zzz');
|
||||
assert.ok(Array.isArray(result.nodes));
|
||||
assert.equal(result.nodes.length, 0);
|
||||
});
|
||||
|
||||
it('search is case-insensitive', async () => {
|
||||
const lower = await graphQuery(projectDir, 'auth');
|
||||
const upper = await graphQuery(projectDir, 'AUTH');
|
||||
assert.deepEqual(
|
||||
lower.nodes.map((n) => n.id).sort(),
|
||||
upper.nodes.map((n) => n.id).sort(),
|
||||
);
|
||||
});
|
||||
|
||||
it('budget trims AMBIGUOUS edges first', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
// Write a graph with mixed confidence edges
|
||||
const mixedGraph: KnowledgeGraph = {
|
||||
builtAt: new Date().toISOString(),
|
||||
nodes: [
|
||||
{ id: 'n1', label: 'seed node budget', type: 'milestone', confidence: 'EXTRACTED' },
|
||||
{ id: 'n2', label: 'connected via AMBIGUOUS', type: 'task', confidence: 'AMBIGUOUS' },
|
||||
{ id: 'n3', label: 'connected via INFERRED', type: 'task', confidence: 'INFERRED' },
|
||||
],
|
||||
edges: [
|
||||
{ from: 'n1', to: 'n2', type: 'contains', confidence: 'AMBIGUOUS' },
|
||||
{ from: 'n1', to: 'n3', type: 'contains', confidence: 'INFERRED' },
|
||||
],
|
||||
};
|
||||
await writeGraph(gsdRoot, mixedGraph);
|
||||
|
||||
// With a very small budget, AMBIGUOUS edges should be trimmed first
|
||||
const result = await graphQuery(projectDir, 'seed node budget', 10);
|
||||
// At minimum, the seed node itself should be present
|
||||
assert.ok(result.nodes.some((n) => n.id === 'n1'), 'Seed node should be in result');
|
||||
|
||||
// Restore the original graph
|
||||
const originalGraph = await buildGraph(projectDir);
|
||||
await writeGraph(gsdRoot, originalGraph);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// writeSnapshot + graphDiff tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('graphDiff', () => {
|
||||
let projectDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
projectDir = tmpProject();
|
||||
makeProjectWithArtifacts(projectDir);
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
const graph = await buildGraph(projectDir);
|
||||
await writeGraph(gsdRoot, graph);
|
||||
});
|
||||
|
||||
afterEach(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||
|
||||
it('returns empty diff when comparing graph to itself (snapshot = current)', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
await writeSnapshot(gsdRoot);
|
||||
const diff = await graphDiff(projectDir);
|
||||
assert.ok(Array.isArray(diff.nodes.added));
|
||||
assert.ok(Array.isArray(diff.nodes.removed));
|
||||
assert.ok(Array.isArray(diff.nodes.changed));
|
||||
assert.equal(diff.nodes.added.length, 0);
|
||||
assert.equal(diff.nodes.removed.length, 0);
|
||||
});
|
||||
|
||||
it('returns added nodes when a new node appears after snapshot', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
// Take snapshot of the original graph
|
||||
await writeSnapshot(gsdRoot);
|
||||
|
||||
// Now write a graph with an extra node
|
||||
const extraGraph: KnowledgeGraph = {
|
||||
builtAt: new Date().toISOString(),
|
||||
nodes: [
|
||||
{ id: 'brand-new-node', label: 'New Feature', type: 'milestone', confidence: 'EXTRACTED' },
|
||||
],
|
||||
edges: [],
|
||||
};
|
||||
await writeGraph(gsdRoot, extraGraph);
|
||||
|
||||
const diff = await graphDiff(projectDir);
|
||||
assert.ok(diff.nodes.added.includes('brand-new-node'), 'new node should be in added');
|
||||
});
|
||||
|
||||
it('returns removed nodes when a node disappears after snapshot', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
// Create snapshot with a node that won't exist in current graph
|
||||
const snapshotGraph: KnowledgeGraph = {
|
||||
builtAt: new Date().toISOString(),
|
||||
nodes: [
|
||||
{ id: 'old-node-to-be-removed', label: 'Old', type: 'task', confidence: 'EXTRACTED' },
|
||||
],
|
||||
edges: [],
|
||||
};
|
||||
writeFileSync(
|
||||
join(gsdRoot, 'graphs', '.last-build-snapshot.json'),
|
||||
JSON.stringify({ ...snapshotGraph, snapshotAt: new Date().toISOString() }),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
// Current graph.json has no such node
|
||||
const diff = await graphDiff(projectDir);
|
||||
assert.ok(diff.nodes.removed.includes('old-node-to-be-removed'), 'old node should be in removed');
|
||||
});
|
||||
|
||||
it('returns empty diff structure when no snapshot exists', async () => {
|
||||
// No snapshot file — diff should be empty/meaningful
|
||||
const diff = await graphDiff(projectDir);
|
||||
assert.ok(Array.isArray(diff.nodes.added));
|
||||
assert.ok(Array.isArray(diff.nodes.removed));
|
||||
assert.ok(Array.isArray(diff.nodes.changed));
|
||||
assert.ok(Array.isArray(diff.edges.added));
|
||||
assert.ok(Array.isArray(diff.edges.removed));
|
||||
});
|
||||
|
||||
it('writeSnapshot creates .last-build-snapshot.json with snapshotAt', async () => {
|
||||
const gsdRoot = join(projectDir, '.gsd');
|
||||
await writeSnapshot(gsdRoot);
|
||||
const snapshotPath = join(gsdRoot, 'graphs', '.last-build-snapshot.json');
|
||||
assert.ok(existsSync(snapshotPath));
|
||||
const raw = readFileSync(snapshotPath, 'utf-8');
|
||||
const parsed = JSON.parse(raw) as KnowledgeGraph & { snapshotAt: string };
|
||||
assert.ok(typeof parsed.snapshotAt === 'string');
|
||||
assert.ok(!isNaN(Date.parse(parsed.snapshotAt)));
|
||||
});
|
||||
});
|
||||
708
packages/mcp-server/src/readers/graph.ts
Normal file
708
packages/mcp-server/src/readers/graph.ts
Normal file
|
|
@ -0,0 +1,708 @@
|
|||
// GSD MCP Server — knowledge graph reader
|
||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||
|
||||
/**
|
||||
* Knowledge Graph for GSD projects.
|
||||
*
|
||||
* Parses .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,
|
||||
* KNOWLEDGE.md) into a graph of nodes and edges. Parse errors in any
|
||||
* single artifact are caught and never propagate — the artifact is skipped
|
||||
* and the rest of the graph is returned.
|
||||
*
|
||||
* writeGraph() is atomic: writes to graph.tmp.json then renames to graph.json.
|
||||
*/
|
||||
|
||||
import { readFileSync, writeFileSync, renameSync, existsSync, mkdirSync } from 'node:fs';
|
||||
import { join, resolve } from 'node:path';
|
||||
import { resolveGsdRoot, findMilestoneIds, resolveMilestoneDir, findSliceIds, resolveSliceDir } from './paths.js';
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Kinds of entities the knowledge graph models. */
export type NodeType =
  | 'milestone'
  | 'slice'
  | 'task'
  | 'rule'
  | 'pattern'
  | 'lesson'
  | 'concept';

/** Kinds of directed relationships between nodes. */
export type EdgeType =
  | 'contains'
  | 'depends_on'
  | 'relates_to'
  | 'implements';

/**
 * How certain the extractor is about a node/edge. The parsers in this file
 * only emit 'EXTRACTED'; the other tiers are presumably produced by later
 * passes — TODO confirm against the rest of the builder.
 */
export type ConfidenceTier = 'EXTRACTED' | 'INFERRED' | 'AMBIGUOUS';

/** A single entity in the knowledge graph. */
export interface GraphNode {
  // Stable id, namespaced by kind (e.g. "milestone:M001", "task:M001:S01:T01").
  id: string;
  // Human-readable display name.
  label: string;
  type: NodeType;
  // Optional free-text detail (e.g. a rule's text from KNOWLEDGE.md).
  description?: string;
  confidence: ConfidenceTier;
  // Artifact the node was extracted from, relative to the GSD root
  // (e.g. "STATE.md", "KNOWLEDGE.md"). Absent when no file was readable.
  sourceFile?: string;
}

/** A directed relationship between two nodes, referenced by node id. */
export interface GraphEdge {
  from: string;
  to: string;
  type: EdgeType;
  confidence: ConfidenceTier;
}

/** The full graph, as built in memory and persisted to graph.json. */
export interface KnowledgeGraph {
  nodes: GraphNode[];
  edges: GraphEdge[];
  // ISO-8601 timestamp recording when the graph was built.
  builtAt: string;
}

/**
 * Result of graphStatus(). The optional fields are only meaningful when
 * `exists` is true — NOTE(review): confirm in graphStatus() (not visible here).
 */
export interface GraphStatusResult {
  exists: boolean;
  lastBuild?: string;
  nodeCount?: number;
  edgeCount?: number;
  // Stale means older than the freshness threshold (24 h per the tests).
  stale?: boolean;
  ageHours?: number;
}

/** Result of graphQuery(): the matched subgraph plus the query parameters. */
export interface GraphQueryResult {
  nodes: GraphNode[];
  edges: GraphEdge[];
  // The search term that produced this result.
  term: string;
  // Token budget applied while trimming the subgraph.
  budget: number;
}

/** Result of graphDiff(): node/edge ids added, removed, or changed since the snapshot. */
export interface GraphDiffResult {
  nodes: {
    added: string[];
    removed: string[];
    changed: string[];
  };
  edges: {
    added: string[];
    removed: string[];
  };
}
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Graph file paths
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function graphsDir(gsdRoot: string): string {
|
||||
return join(gsdRoot, 'graphs');
|
||||
}
|
||||
|
||||
function graphJsonPath(gsdRoot: string): string {
|
||||
return join(graphsDir(gsdRoot), 'graph.json');
|
||||
}
|
||||
|
||||
function graphTmpPath(gsdRoot: string): string {
|
||||
return join(graphsDir(gsdRoot), 'graph.tmp.json');
|
||||
}
|
||||
|
||||
function snapshotPath(gsdRoot: string): string {
|
||||
return join(graphsDir(gsdRoot), '.last-build-snapshot.json');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Parsers — each returns nodes/edges and never throws
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Parse STATE.md for active milestone and phase concepts.
|
||||
*/
|
||||
function parseStateFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
||||
const statePath = join(gsdRoot, 'STATE.md');
|
||||
if (!existsSync(statePath)) return;
|
||||
|
||||
let content: string;
|
||||
try {
|
||||
content = readFileSync(statePath, 'utf-8');
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract active milestone
|
||||
const activeMilestoneMatch = content.match(/\*\*Active Milestone:\*\*\s+([A-Z]\d+):\s+(.+)/i);
|
||||
if (activeMilestoneMatch) {
|
||||
const [, milestoneId, title] = activeMilestoneMatch;
|
||||
const id = `milestone:${milestoneId}`;
|
||||
if (!nodes.some((n) => n.id === id)) {
|
||||
nodes.push({
|
||||
id,
|
||||
label: `${milestoneId}: ${title.trim()}`,
|
||||
type: 'milestone',
|
||||
description: `Active milestone: ${milestoneId}`,
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: 'STATE.md',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Extract phase as concept
|
||||
const phaseMatch = content.match(/\*\*Phase:\*\*\s+(\S+)/i);
|
||||
if (phaseMatch) {
|
||||
const phase = phaseMatch[1].trim();
|
||||
nodes.push({
|
||||
id: `concept:phase:${phase}`,
|
||||
label: `Phase: ${phase}`,
|
||||
type: 'concept',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: 'STATE.md',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse KNOWLEDGE.md for rules, patterns, and lessons.
|
||||
*/
|
||||
function parseKnowledgeFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
||||
const knowledgePath = join(gsdRoot, 'KNOWLEDGE.md');
|
||||
if (!existsSync(knowledgePath)) return;
|
||||
|
||||
let content: string;
|
||||
try {
|
||||
content = readFileSync(knowledgePath, 'utf-8');
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse Rules table
|
||||
const rulesMatch = content.match(/## Rules\s*\n([\s\S]*?)(?=\n## |$)/i);
|
||||
if (rulesMatch) {
|
||||
for (const line of rulesMatch[1].split('\n')) {
|
||||
if (!line.includes('|')) continue;
|
||||
const cells = line.split('|').map((c) => c.trim()).filter(Boolean);
|
||||
if (cells.length < 3) continue;
|
||||
if (cells[0].startsWith('#') || cells[0].startsWith('-')) continue;
|
||||
const id = cells[0];
|
||||
if (!/^K\d+$/i.test(id)) continue;
|
||||
nodes.push({
|
||||
id: `rule:${id}`,
|
||||
label: id,
|
||||
type: 'rule',
|
||||
description: cells[2] ?? '',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: 'KNOWLEDGE.md',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Patterns table
|
||||
const patternsMatch = content.match(/## Patterns\s*\n([\s\S]*?)(?=\n## |$)/i);
|
||||
if (patternsMatch) {
|
||||
for (const line of patternsMatch[1].split('\n')) {
|
||||
if (!line.includes('|')) continue;
|
||||
const cells = line.split('|').map((c) => c.trim()).filter(Boolean);
|
||||
if (cells.length < 2) continue;
|
||||
if (cells[0].startsWith('#') || cells[0].startsWith('-')) continue;
|
||||
const id = cells[0];
|
||||
if (!/^P\d+$/i.test(id)) continue;
|
||||
nodes.push({
|
||||
id: `pattern:${id}`,
|
||||
label: id,
|
||||
type: 'pattern',
|
||||
description: cells[1] ?? '',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: 'KNOWLEDGE.md',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Parse Lessons Learned table
|
||||
const lessonsMatch = content.match(/## Lessons Learned\s*\n([\s\S]*?)(?=\n## |$)/i);
|
||||
if (lessonsMatch) {
|
||||
for (const line of lessonsMatch[1].split('\n')) {
|
||||
if (!line.includes('|')) continue;
|
||||
const cells = line.split('|').map((c) => c.trim()).filter(Boolean);
|
||||
if (cells.length < 2) continue;
|
||||
if (cells[0].startsWith('#') || cells[0].startsWith('-')) continue;
|
||||
const id = cells[0];
|
||||
if (!/^L\d+$/i.test(id)) continue;
|
||||
nodes.push({
|
||||
id: `lesson:${id}`,
|
||||
label: id,
|
||||
type: 'lesson',
|
||||
description: cells[1] ?? '',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: 'KNOWLEDGE.md',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse milestone ROADMAP.md files for milestones and slices.
|
||||
*/
|
||||
function parseMilestoneFiles(
|
||||
gsdRoot: string,
|
||||
nodes: GraphNode[],
|
||||
edges: GraphEdge[],
|
||||
): void {
|
||||
const milestoneIds = findMilestoneIds(gsdRoot);
|
||||
|
||||
for (const milestoneId of milestoneIds) {
|
||||
try {
|
||||
parseSingleMilestone(gsdRoot, milestoneId, nodes, edges);
|
||||
} catch {
|
||||
// Skip this milestone on any error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseSingleMilestone(
|
||||
gsdRoot: string,
|
||||
milestoneId: string,
|
||||
nodes: GraphNode[],
|
||||
edges: GraphEdge[],
|
||||
): void {
|
||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
||||
if (!mDir) return;
|
||||
|
||||
const milestoneNodeId = `milestone:${milestoneId}`;
|
||||
|
||||
// Try to read the roadmap file
|
||||
const roadmapPath = join(mDir, `${milestoneId}-ROADMAP.md`);
|
||||
let roadmapContent: string | null = null;
|
||||
if (existsSync(roadmapPath)) {
|
||||
try {
|
||||
roadmapContent = readFileSync(roadmapPath, 'utf-8');
|
||||
} catch {
|
||||
// Skip
|
||||
}
|
||||
}
|
||||
|
||||
// Extract milestone title from roadmap
|
||||
let milestoneTitle = milestoneId;
|
||||
if (roadmapContent) {
|
||||
const titleMatch = roadmapContent.match(/^#\s+[A-Z]\d+:\s+(.+)/m);
|
||||
if (titleMatch) milestoneTitle = `${milestoneId}: ${titleMatch[1].trim()}`;
|
||||
}
|
||||
|
||||
// Ensure milestone node exists
|
||||
if (!nodes.some((n) => n.id === milestoneNodeId)) {
|
||||
nodes.push({
|
||||
id: milestoneNodeId,
|
||||
label: milestoneTitle,
|
||||
type: 'milestone',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: roadmapContent ? `milestones/${milestoneId}/${milestoneId}-ROADMAP.md` : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
// Parse slices from roadmap table or filesystem
|
||||
const sliceIds = findSliceIds(gsdRoot, milestoneId);
|
||||
for (const sliceId of sliceIds) {
|
||||
try {
|
||||
parseSingleSlice(gsdRoot, milestoneId, sliceId, milestoneNodeId, nodes, edges);
|
||||
} catch {
|
||||
// Skip this slice on any error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseSingleSlice(
|
||||
gsdRoot: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
milestoneNodeId: string,
|
||||
nodes: GraphNode[],
|
||||
edges: GraphEdge[],
|
||||
): void {
|
||||
const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
|
||||
if (!sDir) return;
|
||||
|
||||
const sliceNodeId = `slice:${milestoneId}:${sliceId}`;
|
||||
|
||||
// Try to read the slice plan
|
||||
const planPath = join(sDir, `${sliceId}-PLAN.md`);
|
||||
let sliceTitle = `${milestoneId}/${sliceId}`;
|
||||
let planContent: string | null = null;
|
||||
|
||||
if (existsSync(planPath)) {
|
||||
try {
|
||||
planContent = readFileSync(planPath, 'utf-8');
|
||||
const titleMatch = planContent.match(/^#\s+[A-Z]\d+:\s+(.+)/m);
|
||||
if (titleMatch) sliceTitle = `${sliceId}: ${titleMatch[1].trim()}`;
|
||||
} catch {
|
||||
// Use default title
|
||||
}
|
||||
}
|
||||
|
||||
nodes.push({
|
||||
id: sliceNodeId,
|
||||
label: sliceTitle,
|
||||
type: 'slice',
|
||||
confidence: 'EXTRACTED',
|
||||
sourceFile: planContent ? `milestones/${milestoneId}/slices/${sliceId}/${sliceId}-PLAN.md` : undefined,
|
||||
});
|
||||
|
||||
// Edge: milestone contains slice
|
||||
edges.push({
|
||||
from: milestoneNodeId,
|
||||
to: sliceNodeId,
|
||||
type: 'contains',
|
||||
confidence: 'EXTRACTED',
|
||||
});
|
||||
|
||||
// Parse tasks from the slice plan
|
||||
if (planContent) {
|
||||
parseTasksFromPlan(planContent, milestoneId, sliceId, sliceNodeId, nodes, edges);
|
||||
}
|
||||
}
|
||||
|
||||
function parseTasksFromPlan(
|
||||
content: string,
|
||||
milestoneId: string,
|
||||
sliceId: string,
|
||||
sliceNodeId: string,
|
||||
nodes: GraphNode[],
|
||||
edges: GraphEdge[],
|
||||
): void {
|
||||
// Match lines like: - [ ] **T01: Title** — description
|
||||
const taskPattern = /[-*]\s+\[[ x]\]\s+\*\*(T\d+):\s*([^*]+)\*\*/g;
|
||||
let match: RegExpExecArray | null;
|
||||
|
||||
while ((match = taskPattern.exec(content)) !== null) {
|
||||
const [, taskId, taskTitle] = match;
|
||||
const taskNodeId = `task:${milestoneId}:${sliceId}:${taskId}`;
|
||||
|
||||
nodes.push({
|
||||
id: taskNodeId,
|
||||
label: `${taskId}: ${taskTitle.trim()}`,
|
||||
type: 'task',
|
||||
confidence: 'EXTRACTED',
|
||||
});
|
||||
|
||||
edges.push({
|
||||
from: sliceNodeId,
|
||||
to: taskNodeId,
|
||||
type: 'contains',
|
||||
confidence: 'EXTRACTED',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// buildGraph
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Build a KnowledgeGraph by parsing all .gsd/ artifacts.
|
||||
*
|
||||
* Parse errors in any single artifact are caught — the artifact is skipped
|
||||
* and never causes buildGraph() to throw.
|
||||
*/
|
||||
export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
|
||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
||||
|
||||
const nodes: GraphNode[] = [];
|
||||
const edges: GraphEdge[] = [];
|
||||
|
||||
// Each parser is wrapped so a crash in one never stops others
|
||||
const parsers: Array<(g: string, n: GraphNode[], e: GraphEdge[]) => void> = [
|
||||
parseStateFile,
|
||||
parseKnowledgeFile,
|
||||
parseMilestoneFiles,
|
||||
];
|
||||
|
||||
for (const parser of parsers) {
|
||||
try {
|
||||
parser(gsdRoot, nodes, edges);
|
||||
} catch {
|
||||
// Parsing error — skip this artifact, mark as ambiguous
|
||||
nodes.push({
|
||||
id: `error:${parser.name}:${Date.now()}`,
|
||||
label: `Parse error in ${parser.name}`,
|
||||
type: 'concept',
|
||||
confidence: 'AMBIGUOUS',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Deduplicate nodes by id (keep first occurrence)
|
||||
const seen = new Set<string>();
|
||||
const dedupedNodes = nodes.filter((n) => {
|
||||
if (seen.has(n.id)) return false;
|
||||
seen.add(n.id);
|
||||
return true;
|
||||
});
|
||||
|
||||
return {
|
||||
nodes: dedupedNodes,
|
||||
edges,
|
||||
builtAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// writeGraph — atomic write via tmp + rename
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Write the graph to .gsd/graphs/graph.json atomically.
|
||||
*
|
||||
* Writes to graph.tmp.json first, then renames to graph.json.
|
||||
* Creates the graphs/ directory if it does not exist.
|
||||
*/
|
||||
export async function writeGraph(gsdRoot: string, graph: KnowledgeGraph): Promise<void> {
|
||||
const dir = graphsDir(gsdRoot);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
|
||||
const tmp = graphTmpPath(gsdRoot);
|
||||
const final = graphJsonPath(gsdRoot);
|
||||
|
||||
writeFileSync(tmp, JSON.stringify(graph, null, 2), 'utf-8');
|
||||
renameSync(tmp, final);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// writeSnapshot
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Copy the current graph.json to .last-build-snapshot.json.
|
||||
* Adds a snapshotAt timestamp to the copy.
|
||||
*/
|
||||
export async function writeSnapshot(gsdRoot: string): Promise<void> {
|
||||
const src = graphJsonPath(gsdRoot);
|
||||
if (!existsSync(src)) return;
|
||||
|
||||
const dir = graphsDir(gsdRoot);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
|
||||
const raw = readFileSync(src, 'utf-8');
|
||||
let graph: KnowledgeGraph;
|
||||
try {
|
||||
graph = JSON.parse(raw) as KnowledgeGraph;
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
const snapshot = { ...graph, snapshotAt: new Date().toISOString() };
|
||||
|
||||
writeFileSync(snapshotPath(gsdRoot), JSON.stringify(snapshot, null, 2), 'utf-8');
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// graphStatus
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Return status of the graph: whether it exists, its age, and whether it is stale.
|
||||
* Stale means builtAt is older than 24 hours.
|
||||
*/
|
||||
export async function graphStatus(projectDir: string): Promise<GraphStatusResult> {
|
||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
||||
const graphPath = graphJsonPath(gsdRoot);
|
||||
|
||||
if (!existsSync(graphPath)) {
|
||||
return { exists: false };
|
||||
}
|
||||
|
||||
try {
|
||||
const raw = readFileSync(graphPath, 'utf-8');
|
||||
const graph = JSON.parse(raw) as KnowledgeGraph;
|
||||
|
||||
const builtAt = graph.builtAt;
|
||||
const ageMs = Date.now() - new Date(builtAt).getTime();
|
||||
const ageHours = ageMs / (1000 * 60 * 60);
|
||||
const stale = ageHours > 24;
|
||||
|
||||
return {
|
||||
exists: true,
|
||||
lastBuild: builtAt,
|
||||
nodeCount: graph.nodes.length,
|
||||
edgeCount: graph.edges.length,
|
||||
stale,
|
||||
ageHours,
|
||||
};
|
||||
} catch {
|
||||
return { exists: false };
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// applyBudget — trim edges to stay within token budget
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Given a set of seed node IDs and the full graph, apply BFS to collect
|
||||
* reachable nodes and edges. Trims AMBIGUOUS edges first, then INFERRED,
|
||||
* stopping when the estimated token count drops within budget.
|
||||
*
|
||||
* Budget is a rough token estimate: 1 node ≈ 20 tokens, 1 edge ≈ 10 tokens.
|
||||
*/
|
||||
function applyBudget(
|
||||
graph: KnowledgeGraph,
|
||||
seedIds: Set<string>,
|
||||
budget: number,
|
||||
): { nodes: GraphNode[]; edges: GraphEdge[] } {
|
||||
// BFS to collect reachable nodes (start from seeds)
|
||||
const reachable = new Set<string>(seedIds);
|
||||
const queue = [...seedIds];
|
||||
|
||||
while (queue.length > 0) {
|
||||
const current = queue.shift()!;
|
||||
for (const edge of graph.edges) {
|
||||
if (edge.from === current && !reachable.has(edge.to)) {
|
||||
reachable.add(edge.to);
|
||||
queue.push(edge.to);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let resultNodes = graph.nodes.filter((n) => reachable.has(n.id));
|
||||
let resultEdges = graph.edges.filter(
|
||||
(e) => reachable.has(e.from) && reachable.has(e.to),
|
||||
);
|
||||
|
||||
// Estimate tokens and trim if over budget
|
||||
// Trim AMBIGUOUS edges first, then INFERRED
|
||||
const estimate = (): number =>
|
||||
resultNodes.length * 20 + resultEdges.length * 10;
|
||||
|
||||
if (estimate() > budget) {
|
||||
resultEdges = resultEdges.filter((e) => e.confidence !== 'AMBIGUOUS');
|
||||
}
|
||||
if (estimate() > budget) {
|
||||
resultEdges = resultEdges.filter((e) => e.confidence !== 'INFERRED');
|
||||
}
|
||||
if (estimate() > budget) {
|
||||
// Hard trim — keep only seed nodes and their EXTRACTED edges
|
||||
const seedNodes = resultNodes.filter((n) => seedIds.has(n.id));
|
||||
const seedEdges = resultEdges.filter(
|
||||
(e) => seedIds.has(e.from) && e.confidence === 'EXTRACTED',
|
||||
);
|
||||
return { nodes: seedNodes, edges: seedEdges };
|
||||
}
|
||||
|
||||
return { nodes: resultNodes, edges: resultEdges };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// graphQuery
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Query the graph for nodes matching a term (case-insensitive on label + description).
|
||||
* BFS from seed nodes, applying budget trimming.
|
||||
*
|
||||
* Reads from the pre-built graph.json. Falls back to an empty result if no
|
||||
* graph exists.
|
||||
*/
|
||||
export async function graphQuery(
|
||||
projectDir: string,
|
||||
term: string,
|
||||
budget = 4000,
|
||||
): Promise<GraphQueryResult> {
|
||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
||||
const graphPath = graphJsonPath(gsdRoot);
|
||||
|
||||
if (!existsSync(graphPath)) {
|
||||
return { nodes: [], edges: [], term, budget };
|
||||
}
|
||||
|
||||
let graph: KnowledgeGraph;
|
||||
try {
|
||||
const raw = readFileSync(graphPath, 'utf-8');
|
||||
graph = JSON.parse(raw) as KnowledgeGraph;
|
||||
} catch {
|
||||
return { nodes: [], edges: [], term, budget };
|
||||
}
|
||||
|
||||
if (!term || term.trim() === '') {
|
||||
// Empty term — return empty result
|
||||
return { nodes: [], edges: [], term, budget };
|
||||
}
|
||||
|
||||
const lower = term.toLowerCase();
|
||||
|
||||
// Find seed nodes that match the term
|
||||
const seedIds = new Set<string>(
|
||||
graph.nodes
|
||||
.filter((n) => {
|
||||
const labelMatch = n.label.toLowerCase().includes(lower);
|
||||
const descMatch = n.description?.toLowerCase().includes(lower) ?? false;
|
||||
return labelMatch || descMatch;
|
||||
})
|
||||
.map((n) => n.id),
|
||||
);
|
||||
|
||||
if (seedIds.size === 0) {
|
||||
return { nodes: [], edges: [], term, budget };
|
||||
}
|
||||
|
||||
const result = applyBudget(graph, seedIds, budget);
|
||||
return { ...result, term, budget };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// graphDiff
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Compare the current graph.json with .last-build-snapshot.json.
|
||||
* Returns added/removed/changed nodes and added/removed edges.
|
||||
*
|
||||
* If no snapshot exists, returns empty diff arrays.
|
||||
*/
|
||||
export async function graphDiff(projectDir: string): Promise<GraphDiffResult> {
|
||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
||||
const empty: GraphDiffResult = {
|
||||
nodes: { added: [], removed: [], changed: [] },
|
||||
edges: { added: [], removed: [] },
|
||||
};
|
||||
|
||||
const graphPath = graphJsonPath(gsdRoot);
|
||||
const snap = snapshotPath(gsdRoot);
|
||||
|
||||
if (!existsSync(graphPath)) return empty;
|
||||
if (!existsSync(snap)) return empty;
|
||||
|
||||
let current: KnowledgeGraph;
|
||||
let snapshot: KnowledgeGraph;
|
||||
|
||||
try {
|
||||
current = JSON.parse(readFileSync(graphPath, 'utf-8')) as KnowledgeGraph;
|
||||
} catch {
|
||||
return empty;
|
||||
}
|
||||
|
||||
try {
|
||||
snapshot = JSON.parse(readFileSync(snap, 'utf-8')) as KnowledgeGraph;
|
||||
} catch {
|
||||
return empty;
|
||||
}
|
||||
|
||||
const currentNodeIds = new Set(current.nodes.map((n) => n.id));
|
||||
const snapshotNodeIds = new Set(snapshot.nodes.map((n) => n.id));
|
||||
|
||||
const added = current.nodes.filter((n) => !snapshotNodeIds.has(n.id)).map((n) => n.id);
|
||||
const removed = snapshot.nodes.filter((n) => !currentNodeIds.has(n.id)).map((n) => n.id);
|
||||
|
||||
// Changed: same id but different label or description
|
||||
const snapshotNodeMap = new Map(snapshot.nodes.map((n) => [n.id, n]));
|
||||
const changed = current.nodes
|
||||
.filter((n) => {
|
||||
const snap = snapshotNodeMap.get(n.id);
|
||||
if (!snap) return false;
|
||||
return n.label !== snap.label || n.description !== snap.description;
|
||||
})
|
||||
.map((n) => n.id);
|
||||
|
||||
// Edges — compare by string key "from->to:type"
|
||||
const edgeKey = (e: GraphEdge): string => `${e.from}->${e.to}:${e.type}`;
|
||||
const currentEdgeKeys = new Set(current.edges.map(edgeKey));
|
||||
const snapshotEdgeKeys = new Set(snapshot.edges.map(edgeKey));
|
||||
|
||||
const edgesAdded = current.edges.filter((e) => !snapshotEdgeKeys.has(edgeKey(e))).map(edgeKey);
|
||||
const edgesRemoved = snapshot.edges.filter((e) => !currentEdgeKeys.has(edgeKey(e))).map(edgeKey);
|
||||
|
||||
return {
|
||||
nodes: { added, removed, changed },
|
||||
edges: { added: edgesAdded, removed: edgesRemoved },
|
||||
};
|
||||
}
|
||||
|
|
@ -14,3 +14,15 @@ export { readKnowledge } from './knowledge.js';
|
|||
export type { KnowledgeResult, KnowledgeEntry } from './knowledge.js';
|
||||
export { runDoctorLite } from './doctor-lite.js';
|
||||
export type { DoctorResult, DoctorIssue } from './doctor-lite.js';
|
||||
export { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './graph.js';
|
||||
export type {
|
||||
NodeType,
|
||||
EdgeType,
|
||||
ConfidenceTier,
|
||||
GraphNode,
|
||||
GraphEdge,
|
||||
KnowledgeGraph,
|
||||
GraphStatusResult,
|
||||
GraphQueryResult,
|
||||
GraphDiffResult,
|
||||
} from './graph.js';
|
||||
|
|
|
|||
|
|
@ -20,6 +20,8 @@ import { readRoadmap } from './readers/roadmap.js';
|
|||
import { readHistory } from './readers/metrics.js';
|
||||
import { readCaptures } from './readers/captures.js';
|
||||
import { readKnowledge } from './readers/knowledge.js';
|
||||
import { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './readers/graph.js';
|
||||
import { resolveGsdRoot } from './readers/paths.js';
|
||||
import { runDoctorLite } from './readers/doctor-lite.js';
|
||||
import { registerWorkflowTools } from './workflow-tools.js';
|
||||
import { applySecrets, checkExistingEnvKeys, detectDestination } from './env-writer.js';
|
||||
|
|
@ -800,6 +802,87 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{
|
|||
},
|
||||
);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// gsd_graph — knowledge graph for GSD projects
|
||||
//
|
||||
// Modes:
|
||||
// build Parse .gsd/ artifacts and write graph.json atomically.
|
||||
// query Search the graph for nodes matching a term (BFS, budget-trimmed).
|
||||
// status Check whether graph.json exists and whether it is stale (>24h).
|
||||
// diff Compare graph.json with the last build snapshot.
|
||||
// -----------------------------------------------------------------------
|
||||
server.tool(
  'gsd_graph',
  [
    'Manage the GSD project knowledge graph. No session required.',
    '',
    'Modes:',
    ' build Parse .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,',
    ' KNOWLEDGE.md) and write .gsd/graphs/graph.json atomically.',
    ' query Search graph nodes by term (BFS from seed matches, budget-trimmed).',
    ' Returns matching nodes and reachable edges within the token budget.',
    ' status Show whether graph.json exists, its age, node/edge counts, and',
    ' whether it is stale (built more than 24 hours ago).',
    ' diff Compare current graph.json with .last-build-snapshot.json.',
    ' Returns added, removed, and changed nodes and edges.',
  ].join('\n'),
  {
    // Input schema (zod). projectDir + mode are always required; the
    // remaining fields only apply to specific modes.
    projectDir: z.string().describe('Absolute path to the project directory'),
    mode: z.enum(['build', 'query', 'status', 'diff']).describe(
      'Operation: build | query | status | diff',
    ),
    term: z.string().optional().describe('Search term for query mode (case-insensitive)'),
    budget: z.number().optional().describe('Token budget for query mode (default: 4000)'),
    snapshot: z.boolean().optional().describe('Write snapshot before build (for future diff)'),
  },
  async (args: Record<string, unknown>) => {
    // Re-assert the schema shape locally; the MCP layer hands us an
    // untyped record.
    const { projectDir, mode, term, budget, snapshot } = args as {
      projectDir: string;
      mode: 'build' | 'query' | 'status' | 'diff';
      term?: string;
      budget?: number;
      snapshot?: boolean;
    };

    try {
      // Git-root-aware .gsd resolution (not a naive `.gsd` append).
      const gsdRoot = resolveGsdRoot(projectDir);

      switch (mode) {
        case 'build': {
          // Optionally save a diff baseline first. Snapshot failures are
          // deliberately swallowed — the build itself must still proceed.
          if (snapshot) {
            await writeSnapshot(gsdRoot).catch(() => { /* best-effort */ });
          }
          const graph = await buildGraph(projectDir);
          await writeGraph(gsdRoot, graph);
          return jsonContent({
            built: true,
            nodeCount: graph.nodes.length,
            edgeCount: graph.edges.length,
            builtAt: graph.builtAt,
          });
        }

        case 'query': {
          // graphQuery tolerates an empty term (returns an empty result),
          // so a missing `term` degrades gracefully rather than throwing.
          const result = await graphQuery(projectDir, term ?? '', budget);
          return jsonContent(result);
        }

        case 'status': {
          const result = await graphStatus(projectDir);
          return jsonContent(result);
        }

        case 'diff': {
          const result = await graphDiff(projectDir);
          return jsonContent(result);
        }
      }
    } catch (err) {
      // All failures surface as isError content, never as a thrown exception.
      return errorContent(err instanceof Error ? err.message : String(err));
    }
  },
);
|
||||
|
||||
registerWorkflowTools(server);
|
||||
|
||||
return { server };
|
||||
|
|
|
|||
78
src/cli.ts
78
src/cli.ts
|
|
@ -172,6 +172,84 @@ if (cliFlags.messages[0] === 'update') {
|
|||
process.exit(0)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Graph subcommand — `gsd graph build|status|query|diff`
|
||||
// ---------------------------------------------------------------------------
|
||||
if (cliFlags.messages[0] === 'graph') {
  const sub = cliFlags.messages[1]
  // Lazy-load the graph API from the published package (not a relative src
  // path) so the CLI does not pay the import cost on unrelated commands.
  const { buildGraph, writeGraph, graphStatus, graphQuery, graphDiff, resolveGsdRoot } = await import('@gsd-build/mcp-server')

  const projectDir = process.cwd()
  // Git-root-aware .gsd resolution — correct in monorepos, unlike a naive
  // `.gsd` append.
  const gsdRoot = resolveGsdRoot(projectDir)

  // `gsd graph` with no subcommand defaults to a build.
  if (!sub || sub === 'build') {
    try {
      const graph = await buildGraph(projectDir)
      await writeGraph(gsdRoot, graph)
      process.stdout.write(`Graph built: ${graph.nodes.length} nodes, ${graph.edges.length} edges\n`)
    } catch (err) {
      process.stderr.write(`[gsd] graph build failed: ${err instanceof Error ? err.message : String(err)}\n`)
      process.exit(1)
    }
  } else if (sub === 'status') {
    try {
      const result = await graphStatus(projectDir)
      if (!result.exists) {
        process.stdout.write('Graph: not built yet. Run: gsd graph build\n')
      } else {
        // Optional fields (ageHours, lastBuild) print as 'n/a' when absent.
        process.stdout.write(`Graph status:\n`)
        process.stdout.write(` exists: ${result.exists}\n`)
        process.stdout.write(` nodes: ${result.nodeCount}\n`)
        process.stdout.write(` edges: ${result.edgeCount}\n`)
        process.stdout.write(` stale: ${result.stale}\n`)
        process.stdout.write(` ageHours: ${result.ageHours !== undefined ? result.ageHours.toFixed(2) : 'n/a'}\n`)
        process.stdout.write(` lastBuild: ${result.lastBuild ?? 'n/a'}\n`)
      }
    } catch (err) {
      process.stderr.write(`[gsd] graph status failed: ${err instanceof Error ? err.message : String(err)}\n`)
      process.exit(1)
    }
  } else if (sub === 'query') {
    // The search term is the positional argument after `graph query`.
    const term = cliFlags.messages[2]
    if (!term) {
      process.stderr.write('Usage: gsd graph query <term>\n')
      process.exit(1)
    }
    try {
      const result = await graphQuery(projectDir, term)
      if (result.nodes.length === 0) {
        process.stdout.write(`No nodes found for term: "${term}"\n`)
      } else {
        process.stdout.write(`Query results for "${term}" (${result.nodes.length} nodes, ${result.edges.length} edges):\n`)
        for (const node of result.nodes) {
          process.stdout.write(` [${node.type}] ${node.label} (${node.confidence})\n`)
        }
      }
    } catch (err) {
      process.stderr.write(`[gsd] graph query failed: ${err instanceof Error ? err.message : String(err)}\n`)
      process.exit(1)
    }
  } else if (sub === 'diff') {
    try {
      const result = await graphDiff(projectDir)
      // Counts only — the full id lists are available via the MCP tool.
      process.stdout.write(`Graph diff:\n`)
      process.stdout.write(` nodes added: ${result.nodes.added.length}\n`)
      process.stdout.write(` nodes removed: ${result.nodes.removed.length}\n`)
      process.stdout.write(` nodes changed: ${result.nodes.changed.length}\n`)
      process.stdout.write(` edges added: ${result.edges.added.length}\n`)
      process.stdout.write(` edges removed: ${result.edges.removed.length}\n`)
    } catch (err) {
      process.stderr.write(`[gsd] graph diff failed: ${err instanceof Error ? err.message : String(err)}\n`)
      process.exit(1)
    }
  } else {
    process.stderr.write(`Unknown graph command: ${sub}\n`)
    process.stderr.write('Commands: build, status, query <term>, diff\n')
    process.exit(1)
  }
  // All graph subcommands terminate the process; never fall through to the
  // TUI startup path below.
  process.exit(0)
}
|
||||
|
||||
exitIfManagedResourcesAreNewer(agentDir)
|
||||
|
||||
// Early TTY check — must come before heavy initialization to avoid dangling
|
||||
|
|
|
|||
|
|
@ -91,6 +91,29 @@ const SUBCOMMAND_HELP: Record<string, string> = {
|
|||
' gsd worktree remove old-branch --force Remove even with unmerged changes',
|
||||
].join('\n'),
|
||||
|
||||
graph: [
|
||||
'Usage: gsd graph <subcommand> [options]',
|
||||
'',
|
||||
'Manage the GSD project knowledge graph. Reads .gsd/ artifacts and builds',
|
||||
'a queryable graph of milestones, slices, tasks, rules, patterns, and lessons.',
|
||||
'',
|
||||
'Subcommands:',
|
||||
' build Parse .gsd/ artifacts (STATE.md, milestone ROADMAPs, slice PLANs,',
|
||||
' KNOWLEDGE.md) and write .gsd/graphs/graph.json atomically.',
|
||||
' query Search graph nodes by term (BFS from seed matches, budget-trimmed).',
|
||||
' Returns matching nodes and reachable edges within the token budget.',
|
||||
' status Show whether graph.json exists, its age, node/edge counts, and',
|
||||
' whether it is stale (built more than 24 hours ago).',
|
||||
' diff Compare current graph.json with .last-build-snapshot.json.',
|
||||
' Returns added, removed, and changed nodes and edges.',
|
||||
'',
|
||||
'Examples:',
|
||||
' gsd graph build Build the graph from .gsd/ artifacts',
|
||||
' gsd graph status Check graph age and node/edge counts',
|
||||
' gsd graph query auth Find nodes related to "auth"',
|
||||
' gsd graph diff Show changes since last snapshot',
|
||||
].join('\n'),
|
||||
|
||||
headless: [
|
||||
'Usage: gsd headless [flags] [command] [args...]',
|
||||
'',
|
||||
|
|
@ -174,6 +197,7 @@ export function printHelp(version: string): void {
|
|||
process.stdout.write(' worktree <cmd> Manage worktrees (list, merge, clean, remove)\n')
|
||||
process.stdout.write(' auto [args] Run auto-mode without TUI (pipeable)\n')
|
||||
process.stdout.write(' headless [cmd] [args] Run /gsd commands without TUI (default: auto)\n')
|
||||
process.stdout.write(' graph <subcommand> Manage knowledge graph (build, query, status, diff)\n')
|
||||
process.stdout.write('\nRun gsd <subcommand> --help for subcommand-specific help.\n')
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ import {
|
|||
import { formatDecisionsCompact, formatRequirementsCompact } from "./structured-data-formatter.js";
|
||||
import { readPhaseAnchor, formatAnchorForPrompt } from "./phase-anchor.js";
|
||||
import { logWarning } from "./workflow-logger.js";
|
||||
import { inlineGraphSubgraph } from "./graph-context.js";
|
||||
|
||||
// ─── Preamble Cap ─────────────────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -1175,6 +1176,10 @@ export async function buildResearchSlicePrompt(
|
|||
const knowledgeInlineRS = await inlineKnowledgeScoped(base, keywords);
|
||||
if (knowledgeInlineRS) inlined.push(knowledgeInlineRS);
|
||||
|
||||
// Knowledge graph: subgraph for this slice (graceful — skipped if no graph.json)
|
||||
const graphBlockRS = await inlineGraphSubgraph(base, `${sid} ${sTitle}`, { budget: 3000 });
|
||||
if (graphBlockRS) inlined.push(graphBlockRS);
|
||||
|
||||
inlined.push(inlineTemplate("research", "Research"));
|
||||
|
||||
const depContent = await inlineDependencySummaries(mid, sid, base);
|
||||
|
|
@ -1250,6 +1255,10 @@ export async function buildPlanSlicePrompt(
|
|||
const knowledgeInlinePS = await inlineKnowledgeScoped(base, keywordsPS);
|
||||
if (knowledgeInlinePS) inlined.push(knowledgeInlinePS);
|
||||
|
||||
// Knowledge graph: subgraph for this slice (graceful — skipped if no graph.json)
|
||||
const graphBlockPS = await inlineGraphSubgraph(base, `${sid} ${sTitle}`, { budget: 3000 });
|
||||
if (graphBlockPS) inlined.push(graphBlockPS);
|
||||
|
||||
inlined.push(inlineTemplate("plan", "Slice Plan"));
|
||||
if (inlineLevel === "full") {
|
||||
inlined.push(inlineTemplate("task-plan", "Task Plan"));
|
||||
|
|
@ -1366,12 +1375,16 @@ export async function buildExecuteTaskPrompt(
|
|||
// Only include if it has content (not a "not found" result)
|
||||
const knowledgeContent = knowledgeInlineET && !knowledgeInlineET.includes("not found") ? knowledgeInlineET : null;
|
||||
|
||||
// Knowledge graph: tight subgraph for this task (graceful — skipped if no graph.json)
|
||||
const graphBlockET = await inlineGraphSubgraph(base, `${tid} ${tTitle}`, { budget: 2000 });
|
||||
|
||||
const inlinedTemplates = inlineLevel === "minimal"
|
||||
? inlineTemplate("task-summary", "Task Summary")
|
||||
: [
|
||||
inlineTemplate("task-summary", "Task Summary"),
|
||||
inlineTemplate("decisions", "Decisions"),
|
||||
...(knowledgeContent ? [knowledgeContent] : []),
|
||||
...(graphBlockET ? [graphBlockET] : []),
|
||||
].join("\n\n---\n\n");
|
||||
|
||||
const taskSummaryPath = join(base, `${relSlicePath(base, mid, sid)}/tasks/${tid}-SUMMARY.md`);
|
||||
|
|
|
|||
85
src/resources/extensions/gsd/graph-context.ts
Normal file
85
src/resources/extensions/gsd/graph-context.ts
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
/**
|
||||
* Graph-aware context injection for dispatch prompt builders.
|
||||
*
|
||||
* Reads the pre-built graph.json and returns a formatted context block
|
||||
* for injection into prompts. Gracefully returns null when no graph exists
|
||||
* or the query yields no results — callers must handle null.
|
||||
*/
|
||||
|
||||
import { logWarning } from "./workflow-logger.js";
|
||||
import type { GraphQueryResult, GraphStatusResult } from "@gsd-build/mcp-server";
|
||||
|
||||
export interface GraphSubgraphOptions {
|
||||
/** Budget in tokens passed to graphQuery (1 node ≈ 20 tokens, 1 edge ≈ 10 tokens) */
|
||||
budget: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Query the knowledge graph for nodes related to the given term and format
|
||||
* the result as an inlined context block.
|
||||
*
|
||||
* Returns null when:
|
||||
* - @gsd-build/mcp-server fails to import
|
||||
* - graph.json does not exist (graphQuery already handles this gracefully)
|
||||
* - query returns zero nodes
|
||||
*
|
||||
* Annotates the block header when the graph is stale (> 24 hours old).
|
||||
*/
|
||||
export async function inlineGraphSubgraph(
|
||||
projectDir: string,
|
||||
term: string,
|
||||
opts: GraphSubgraphOptions,
|
||||
): Promise<string | null> {
|
||||
if (!term || !term.trim()) return null;
|
||||
|
||||
try {
|
||||
const { graphQuery, graphStatus } = await import("@gsd-build/mcp-server") as {
|
||||
graphQuery: (projectDir: string, term: string, budget?: number) => Promise<GraphQueryResult>;
|
||||
graphStatus: (projectDir: string) => Promise<GraphStatusResult>;
|
||||
};
|
||||
|
||||
const result = await graphQuery(projectDir, term, opts.budget);
|
||||
if (result.nodes.length === 0) return null;
|
||||
|
||||
// Check staleness for annotation
|
||||
let staleAnnotation = "";
|
||||
try {
|
||||
const status = await graphStatus(projectDir);
|
||||
if (status.exists && status.stale && status.ageHours !== undefined) {
|
||||
const hours = Math.round(status.ageHours);
|
||||
staleAnnotation = `\n> ⚠ Graph last built ${hours}h ago — context may be outdated`;
|
||||
}
|
||||
} catch {
|
||||
// Non-fatal — skip annotation on error
|
||||
}
|
||||
|
||||
// Format nodes as a compact list
|
||||
const nodeLines = result.nodes.map((n) => {
|
||||
const desc = n.description ? ` — ${n.description}` : "";
|
||||
return `- **${n.label}** (\`${n.type}\`, ${n.confidence})${desc}`;
|
||||
});
|
||||
|
||||
// Format edges as relations (only if present)
|
||||
const edgeLines = result.edges.length > 0
|
||||
? result.edges.map((e) => `- \`${e.from}\` →[${e.type}]→ \`${e.to}\``)
|
||||
: [];
|
||||
|
||||
const sections: string[] = [
|
||||
`### Knowledge Graph Context (term: "${term}")`,
|
||||
`Source: \`.gsd/graphs/graph.json\``,
|
||||
staleAnnotation,
|
||||
"",
|
||||
`**Nodes (${result.nodes.length}):**`,
|
||||
...nodeLines,
|
||||
];
|
||||
|
||||
if (edgeLines.length > 0) {
|
||||
sections.push("", `**Relations (${result.edges.length}):**`, ...edgeLines);
|
||||
}
|
||||
|
||||
return sections.filter((l) => l !== undefined).join("\n");
|
||||
} catch (err) {
|
||||
logWarning("prompt", `inlineGraphSubgraph failed (non-fatal): ${err instanceof Error ? err.message : String(err)}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
337
src/resources/extensions/gsd/tests/graph-context.test.ts
Normal file
337
src/resources/extensions/gsd/tests/graph-context.test.ts
Normal file
|
|
@ -0,0 +1,337 @@
|
|||
/**
|
||||
* graph-context.test.ts — Unit tests for inlineGraphSubgraph().
|
||||
*
|
||||
* Covers:
|
||||
* Group 1: Null-return paths (empty term, zero nodes, missing graph.json)
|
||||
* Group 2: Correct output formatting (nodes, edges, stale annotation)
|
||||
* Group 3: Node formatting (description, confidence, no-description)
|
||||
*
|
||||
* Testing strategy:
|
||||
* @gsd-build/mcp-server is dynamically imported inside inlineGraphSubgraph().
|
||||
* Because node:test (v22) does not support mock.module() without the
|
||||
* --experimental-test-module-mocks flag (not enabled in test:unit), we
|
||||
* exercise the real graphQuery/graphStatus functions by controlling the
|
||||
* on-disk graph.json that those functions read. This is a clean, deterministic
|
||||
* approach that avoids all module-level mocking.
|
||||
*
|
||||
* Fixture layout per test:
|
||||
* <tmpDir>/.gsd/graphs/graph.json
|
||||
*
|
||||
* builtAt controls staleness: old timestamp → stale, recent → fresh.
|
||||
*/
|
||||
|
||||
import { describe, it } from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { tmpdir } from "node:os";
|
||||
|
||||
import { inlineGraphSubgraph } from "../graph-context.ts";
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Structural double of a graph node as written to graph.json (mirrors GraphNode). */
interface TestNode {
  id: string;           // unique node id — the graph key
  label: string;        // display label; graphQuery matches on this (case-insensitive)
  type: string;         // node kind, e.g. "CLASS" or "task"
  confidence: string;   // confidence tier: EXTRACTED | INFERRED | AMBIGUOUS
  description?: string; // optional free text — also matched by graphQuery
  sourceFile?: string;  // optional provenance path within .gsd/
}

/** Structural double of a directed graph edge (mirrors GraphEdge). */
interface TestEdge {
  from: string;       // source node id
  to: string;         // target node id
  type: string;       // relation kind, e.g. "CALLS" or "contains"
  confidence: string; // confidence tier: EXTRACTED | INFERRED | AMBIGUOUS
}

/** Shape of the graph.json fixture written to disk by makeProjectDir(). */
interface GraphFixture {
  nodes: TestNode[];
  edges: TestEdge[];
  /** ISO timestamp for graph.builtAt. Controls staleness. Default: recent (not stale). */
  builtAt?: string;
}
|
||||
|
||||
/** Returns an ISO timestamp that is stale (> 24h ago). */
|
||||
function staleTimestamp(hoursAgo = 26): string {
|
||||
return new Date(Date.now() - hoursAgo * 60 * 60 * 1000).toISOString();
|
||||
}
|
||||
|
||||
/** Returns an ISO timestamp that is fresh (< 24h ago). */
|
||||
function freshTimestamp(): string {
|
||||
return new Date(Date.now() - 30 * 60 * 1000).toISOString(); // 30 minutes ago
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temp project directory with a .gsd/graphs/graph.json file.
|
||||
* Returns the projectDir path. Caller is responsible for cleanup.
|
||||
*/
|
||||
function makeProjectDir(fixture: GraphFixture): string {
|
||||
const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-test-"));
|
||||
const gsdDir = join(projectDir, ".gsd");
|
||||
const graphsDir = join(gsdDir, "graphs");
|
||||
mkdirSync(graphsDir, { recursive: true });
|
||||
|
||||
const graph = {
|
||||
nodes: fixture.nodes,
|
||||
edges: fixture.edges,
|
||||
builtAt: fixture.builtAt ?? freshTimestamp(),
|
||||
};
|
||||
|
||||
writeFileSync(join(graphsDir, "graph.json"), JSON.stringify(graph), "utf-8");
|
||||
return projectDir;
|
||||
}
|
||||
|
||||
/** Removes a temp directory, suppressing errors on Windows. */
|
||||
function cleanup(dir: string): void {
|
||||
try { rmSync(dir, { recursive: true, force: true }); } catch { /* ignore */ }
|
||||
}
|
||||
|
||||
/** Minimal node factory. */
|
||||
function makeNode(overrides: Partial<TestNode> & { id: string; label: string }): TestNode {
|
||||
return {
|
||||
type: "CLASS",
|
||||
confidence: "INFERRED",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/** Minimal edge factory. */
|
||||
function makeEdge(overrides: Partial<TestEdge> & { from: string; to: string }): TestEdge {
|
||||
return {
|
||||
type: "CALLS",
|
||||
confidence: "INFERRED",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Group 1: Null returns ────────────────────────────────────────────────────
|
||||
|
||||
describe("inlineGraphSubgraph — null returns", () => {
|
||||
it("returns null immediately for empty string term", async () => {
|
||||
// No graph.json needed — exits before any file I/O
|
||||
const result = await inlineGraphSubgraph("/tmp/nonexistent", "", { budget: 3000 });
|
||||
assert.strictEqual(result, null);
|
||||
});
|
||||
|
||||
it("returns null for whitespace-only term", async () => {
|
||||
const result = await inlineGraphSubgraph("/tmp/nonexistent", " ", { budget: 3000 });
|
||||
assert.strictEqual(result, null);
|
||||
});
|
||||
|
||||
it("returns null when graphQuery returns zero nodes (no matching term in graph)", async () => {
|
||||
const projectDir = makeProjectDir({
|
||||
nodes: [makeNode({ id: "n1", label: "AuthService" })],
|
||||
edges: [],
|
||||
});
|
||||
try {
|
||||
// "zzznomatch999" is intentionally absent from the fixture
|
||||
const result = await inlineGraphSubgraph(projectDir, "zzznomatch999", { budget: 3000 });
|
||||
assert.strictEqual(result, null);
|
||||
} finally {
|
||||
cleanup(projectDir);
|
||||
}
|
||||
});
|
||||
|
||||
it("returns null (no throw) when graph.json is missing", async () => {
|
||||
// A project dir with no .gsd directory at all — graphQuery returns zero nodes
|
||||
const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-nofile-"));
|
||||
try {
|
||||
const result = await inlineGraphSubgraph(projectDir, "auth", { budget: 3000 });
|
||||
assert.strictEqual(result, null);
|
||||
} finally {
|
||||
cleanup(projectDir);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Group 2: Correct output formatting ──────────────────────────────────────
|
||||
|
||||
describe("inlineGraphSubgraph — correct output", () => {
  it("returns block with section header and node labels when term matches", async () => {
    // Two nodes share the "User" prefix so both must appear in the block.
    const projectDir = makeProjectDir({
      nodes: [
        makeNode({ id: "n1", label: "UserService" }),
        makeNode({ id: "n2", label: "UserRepository" }),
      ],
      edges: [],
    });
    try {
      const result = await inlineGraphSubgraph(projectDir, "User", { budget: 3000 });
      assert.ok(result !== null, "result should not be null");
      assert.ok(result!.includes("### Knowledge Graph Context"), "should include section header");
      assert.ok(result!.includes("UserService"), "should include first node label");
      assert.ok(result!.includes("UserRepository"), "should include second node label");
      assert.ok(result!.includes("Nodes (2)"), "should show node count");
    } finally {
      cleanup(projectDir);
    }
  });

  it("does not include Relations section when edges array is empty", async () => {
    // Fresh graph (default builtAt) + zero edges: neither Relations nor ⚠ expected.
    const projectDir = makeProjectDir({
      nodes: [makeNode({ id: "n1", label: "AuthController" })],
      edges: [],
    });
    try {
      const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
      assert.ok(result !== null, "result should not be null");
      assert.ok(!result!.includes("Relations"), "should not include Relations section for zero edges");
      assert.ok(!result!.includes("⚠"), "should not include stale warning for fresh graph");
    } finally {
      cleanup(projectDir);
    }
  });

  it("includes Relations section when edges are present", async () => {
    const projectDir = makeProjectDir({
      nodes: [
        makeNode({ id: "n1", label: "AuthService" }),
        makeNode({ id: "n2", label: "UserRepo" }),
      ],
      edges: [makeEdge({ from: "n1", to: "n2", type: "CALLS" })],
    });
    try {
      const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
      assert.ok(result !== null, "result should not be null");
      assert.ok(result!.includes("Relations (1)"), "should show edge count");
      assert.ok(result!.includes("→[CALLS]→"), "should include edge type in arrow notation");
    } finally {
      cleanup(projectDir);
    }
  });

  it("includes stale annotation when graph was built more than 24h ago", async () => {
    const projectDir = makeProjectDir({
      nodes: [makeNode({ id: "n1", label: "AuthService" })],
      edges: [],
      builtAt: staleTimestamp(26), // 26 hours ago → stale
    });
    try {
      const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
      assert.ok(result !== null, "result should not be null");
      assert.ok(result!.includes("⚠ Graph last built"), "should include stale annotation");
      assert.ok(result!.includes("h ago"), "should include hours-ago text");
    } finally {
      cleanup(projectDir);
    }
  });

  it("does not include stale annotation for a fresh graph", async () => {
    const projectDir = makeProjectDir({
      nodes: [makeNode({ id: "n1", label: "AuthService" })],
      edges: [],
      builtAt: freshTimestamp(), // 30 minutes ago → not stale
    });
    try {
      const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
      assert.ok(result !== null, "result should not be null");
      assert.ok(!result!.includes("⚠"), "should not include stale annotation for fresh graph");
    } finally {
      cleanup(projectDir);
    }
  });

  it("returns valid block even when graph.json has corrupted builtAt (graphStatus throws internally)", async () => {
    // Write a graph.json with an invalid builtAt — graphStatus will catch and return {exists: false}
    // inlineGraphSubgraph should still return the node block without stale annotation.
    // Fixture is written by hand (not via makeProjectDir) because the helper
    // always supplies a valid builtAt.
    const projectDir = mkdtempSync(join(tmpdir(), "graph-ctx-corrupt-"));
    const gsdDir = join(projectDir, ".gsd");
    const graphsDir = join(gsdDir, "graphs");
    mkdirSync(graphsDir, { recursive: true });

    const graph = {
      nodes: [{ id: "n1", label: "AuthController", type: "CLASS", confidence: "INFERRED" }],
      edges: [],
      builtAt: "NOT-A-DATE", // invalid ISO — will cause Date.now() - NaN to produce NaN
    };
    writeFileSync(join(graphsDir, "graph.json"), JSON.stringify(graph), "utf-8");

    try {
      const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
      // graphQuery reads the file and finds the node; graphStatus may return {exists: true, stale: false/true}
      // Either way, function must not throw and must return a string with node content
      assert.ok(result !== null, "result should not be null");
      assert.ok(result!.includes("AuthController"), "should include node label");
    } finally {
      cleanup(projectDir);
    }
  });

  it("passes the budget option to graphQuery (enforces node count limit)", async () => {
    // Each node uses ~20 tokens. With budget=20, only ~1 node should be returned.
    // Build a graph with many nodes all matching the same term.
    const nodes: TestNode[] = Array.from({ length: 10 }, (_, i) =>
      makeNode({ id: `n${i}`, label: `AuthModule${i}` })
    );
    const projectDir = makeProjectDir({ nodes, edges: [] });
    try {
      const resultSmall = await inlineGraphSubgraph(projectDir, "Auth", { budget: 20 });
      const resultLarge = await inlineGraphSubgraph(projectDir, "Auth", { budget: 10000 });

      // Both should return something (at least 1 node matches)
      assert.ok(resultSmall !== null, "small-budget result should not be null");
      assert.ok(resultLarge !== null, "large-budget result should not be null");

      // With a very small budget (20 tokens ≈ 1 node), fewer nodes should appear.
      // Node lines are counted via their "- **" bullet prefix in the rendered block.
      const smallNodeCount = (resultSmall!.match(/- \*\*/g) || []).length;
      const largeNodeCount = (resultLarge!.match(/- \*\*/g) || []).length;
      assert.ok(
        smallNodeCount <= largeNodeCount,
        `small-budget should return <= nodes than large-budget (got ${smallNodeCount} vs ${largeNodeCount})`,
      );
    } finally {
      cleanup(projectDir);
    }
  });
});
|
||||
|
||||
// ─── Group 3: Node formatting ─────────────────────────────────────────────────
|
||||
|
||||
describe("inlineGraphSubgraph — node formatting", () => {
|
||||
it("includes description after em-dash when node has description", async () => {
|
||||
const projectDir = makeProjectDir({
|
||||
nodes: [makeNode({ id: "n1", label: "JwtValidator", description: "JWT validation" })],
|
||||
edges: [],
|
||||
});
|
||||
try {
|
||||
const result = await inlineGraphSubgraph(projectDir, "Jwt", { budget: 3000 });
|
||||
assert.ok(result !== null, "result should not be null");
|
||||
assert.ok(result!.includes("— JWT validation"), "should include description after em-dash");
|
||||
} finally {
|
||||
cleanup(projectDir);
|
||||
}
|
||||
});
|
||||
|
||||
it("omits em-dash suffix when node has no description", async () => {
|
||||
const projectDir = makeProjectDir({
|
||||
nodes: [makeNode({ id: "n1", label: "TokenStore" })], // no description
|
||||
edges: [],
|
||||
});
|
||||
try {
|
||||
const result = await inlineGraphSubgraph(projectDir, "Token", { budget: 3000 });
|
||||
assert.ok(result !== null, "result should not be null");
|
||||
const lines = result!.split("\n");
|
||||
const nodeLine = lines.find((l) => l.includes("TokenStore"));
|
||||
assert.ok(nodeLine !== undefined, "node line should be present");
|
||||
assert.ok(!nodeLine.includes("—"), "node line should not include em-dash when no description");
|
||||
} finally {
|
||||
cleanup(projectDir);
|
||||
}
|
||||
});
|
||||
|
||||
it("includes confidence tier in the node output line", async () => {
|
||||
const projectDir = makeProjectDir({
|
||||
nodes: [makeNode({ id: "n1", label: "AuthService", confidence: "EXTRACTED" })],
|
||||
edges: [],
|
||||
});
|
||||
try {
|
||||
const result = await inlineGraphSubgraph(projectDir, "Auth", { budget: 3000 });
|
||||
assert.ok(result !== null, "result should not be null");
|
||||
assert.ok(result!.includes("EXTRACTED"), "should include the confidence tier in node line");
|
||||
} finally {
|
||||
cleanup(projectDir);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
@ -424,6 +424,25 @@ export async function handleCompleteSlice(
|
|||
logError("tool", `complete-slice event log FAILED — completion invisible to reconciliation`, { error: (eventErr as Error).message });
|
||||
}
|
||||
|
||||
// Fire-and-forget graph rebuild — must NOT await, must NOT crash slice completion.
|
||||
// Dynamic import of the package name (not a relative path) so it resolves
|
||||
// correctly via package.json#exports in both development and production.
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
(async () => {
|
||||
try {
|
||||
const graphMod = await import("@gsd-build/mcp-server") as {
|
||||
buildGraph: (dir: string) => Promise<{ nodes: unknown[]; edges: unknown[]; builtAt: string }>;
|
||||
writeGraph: (gsdRoot: string, graph: unknown) => Promise<void>;
|
||||
resolveGsdRoot: (basePath: string) => string;
|
||||
};
|
||||
const g = await graphMod.buildGraph(basePath);
|
||||
await graphMod.writeGraph(graphMod.resolveGsdRoot(basePath), g);
|
||||
} catch (graphErr) {
|
||||
// Graph rebuild is best-effort — log at warning level but never propagate
|
||||
logWarning("tool", `complete-slice graph rebuild failed (non-fatal): ${(graphErr as Error).message ?? String(graphErr)}`);
|
||||
}
|
||||
})();
|
||||
|
||||
return {
|
||||
sliceId: params.sliceId,
|
||||
milestoneId: params.milestoneId,
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue