singularity-forge/src/resources/extensions/sf/codebase-generator.js
Mikael Hugo 48dbb175c0 feat(prefs): migrate canonical preferences file from PREFERENCES.md to preferences.yaml
New installations create .sf/preferences.yaml (pure YAML, no frontmatter
markers) and ~/.sf/preferences.yaml. Existing .md files are read as fallbacks
with no migration required for current users.

Changes:
- preferences.js: add yaml path getters, load chain tries .yaml first, add
  parsePreferencesYaml() for direct YAML parse without frontmatter extraction
- templates/preferences.yaml: new canonical template (pure YAML with comment
  header pointing to preferences-reference.md)
- gitignore.js: ensurePreferences() creates preferences.yaml; simplified by
  removing scaffold-versioning dependency
- init-wizard.js: buildPreferencesFile() produces pure YAML, writes preferences.yaml
- commands-prefs-wizard.js: savePreferencesFile() helper handles .yaml vs .md;
  ensurePreferencesFile uses yaml template for yaml paths
- preferences-template-upgrade.js: yaml files get raw YAML on upgrade
- planning-depth.js: returns {path, isYaml}, handles both formats
- deep-project-setup-policy.js: isWorkflowPrefsCaptured() tries all 3 paths
- detection.js: preferences.yaml added to all detection checks
- auto-worktree.js: canonical=yaml, LEGACY_PREFERENCES_FILES=["PREFERENCES.md","preferences.md"]
- auto-bootstrap-context.js: preferences.yaml before PREFERENCES.md in list
- guided-flow.js / worktree-root.js: existence checks include preferences.yaml
- User-visible strings / comments updated throughout

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-05-10 21:05:10 +02:00

777 lines
23 KiB
JavaScript

/**
* SF Codebase Map Generator
*
* Produces .sf/CODEBASE.md — a structural table of contents for the project.
* Gives fresh agent contexts instant orientation without filesystem exploration.
*
* Generation: walk `git ls-files`, group by directory, output with descriptions.
* Maintenance: agent updates descriptions as it works; incremental update preserves them.
*/
import { execSync } from "node:child_process";
import { createHash } from "node:crypto";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname, extname, join } from "node:path";
import { sfRoot } from "./paths.js";
// ─── Defaults ────────────────────────────────────────────────────────────────
// Directory prefixes (trailing "/") and exact names excluded from the map.
// Consumed by shouldExclude(), which also drops binary/lockfile extensions.
const DEFAULT_EXCLUDES = [
  // ── AI / tooling meta ──
  ".agents/",
  ".sf/",
  ".planning/",
  ".plans/",
  ".claude/",
  ".cursor/",
  ".bg-shell/",
  // ── Editor / IDE ──
  ".vscode/",
  ".idea/",
  // ── VCS ──
  ".git/",
  // ── Dependencies & build artifacts ──
  "node_modules/",
  "dist/",
  "build/",
  ".next/",
  "coverage/",
  "__pycache__/",
  ".venv/",
  "venv/",
  "vendor/",
  "target/",
  // ── Misc ──
  ".cache/",
  "tmp/",
];
// Hard cap on files listed before the map is marked truncated.
const DEFAULT_MAX_FILES = 500;
// Directories holding more files than this collapse to a one-line summary.
const DEFAULT_COLLAPSE_THRESHOLD = 20;
// ensureCodebaseMapFresh() reuses a cached verdict within this window (30s).
const DEFAULT_REFRESH_TTL_MS = 30_000;
// A map older than this is rebuilt even if the tracked file set is unchanged (15 min).
const DEFAULT_MAX_AGE_MS = 15 * 60_000;
// Opening marker of the hidden HTML comment that embeds map metadata as JSON.
const CODEBASE_METADATA_PREFIX = "<!-- sf:codebase-meta ";
// Per-process cache of freshness checks, keyed `${basePath}::${optionSignature}`.
const freshnessCache = new Map();
// ─── Parsing ─────────────────────────────────────────────────────────────────
/**
 * Extract `file path → description` pairs from an existing CODEBASE.md.
 *
 * Recognizes two list-entry shapes:
 *   - `path` — description   (anywhere in the document)
 *   - `path`                 (only outside collapsed-description blocks)
 *
 * Entries inside <!-- sf:collapsed-descriptions ... --> comment blocks are
 * also harvested so collapsed directories keep their descriptions across
 * incremental regenerations.
 */
export function parseCodebaseMap(content) {
  const DESCRIBED_ENTRY = /^- `(.+?)` — (.+)$/;
  const BARE_ENTRY = /^- `(.+?)`\s*$/;
  const result = new Map();
  let insideCollapsed = false;
  for (const rawLine of content.split("\n")) {
    const lead = rawLine.trimStart();
    // Track the boundaries of hidden collapsed-description comment blocks.
    if (lead.startsWith("<!-- sf:collapsed-descriptions")) {
      insideCollapsed = true;
      continue;
    }
    if (insideCollapsed && lead.startsWith("-->")) {
      insideCollapsed = false;
      continue;
    }
    const described = DESCRIBED_ENTRY.exec(rawLine);
    if (described) {
      result.set(described[1], described[2]);
      continue;
    }
    // Bare entries only count outside collapsed blocks.
    if (insideCollapsed) continue;
    const bare = BARE_ENTRY.exec(rawLine);
    if (bare) {
      result.set(bare[1], "");
    }
  }
  return result;
}
/**
 * Locate and validate the embedded metadata comment in a CODEBASE.md.
 * Returns the parsed metadata object when every expected field is present
 * with the right type, otherwise null — a missing or malformed payload is
 * treated by callers as "no metadata / manually maintained map".
 */
export function parseCodebaseMapMetadata(content) {
  const metaLine = content
    .split("\n")
    .find((line) => line.trimStart().startsWith(CODEBASE_METADATA_PREFIX));
  if (!metaLine) return null;
  const trimmed = metaLine.trim();
  const start = CODEBASE_METADATA_PREFIX.length;
  const end = trimmed.lastIndexOf(" -->");
  if (end <= start) return null;
  let candidate;
  try {
    candidate = JSON.parse(trimmed.slice(start, end));
  } catch {
    // Malformed JSON payload — treat the map as carrying no metadata.
    return null;
  }
  const wellFormed =
    typeof candidate?.generatedAt === "string" &&
    typeof candidate?.fingerprint === "string" &&
    typeof candidate?.fileCount === "number" &&
    typeof candidate?.truncated === "boolean";
  return wellFormed ? candidate : null;
}
// ─── File Enumeration ────────────────────────────────────────────────────────
/**
 * Decide whether a tracked file should be omitted from the codebase map.
 *
 * A pattern ending in "/" excludes any file under that directory at any
 * depth; any other pattern excludes exact-path or basename matches.
 * Binary assets and lockfiles are always excluded by extension.
 */
function shouldExclude(filePath, excludes) {
  const matchesPattern = (pattern) =>
    pattern.endsWith("/")
      ? filePath.startsWith(pattern) || filePath.includes(`/${pattern}`)
      : filePath === pattern || filePath.endsWith(`/${pattern}`);
  if (excludes.some(matchesPattern)) return true;
  // Binary / generated artifacts provide no orientation value.
  const binaryExtensions = new Set([
    ".lock",
    ".png",
    ".jpg",
    ".jpeg",
    ".gif",
    ".ico",
    ".woff",
    ".woff2",
    ".ttf",
    ".eot",
    ".svg",
  ]);
  return binaryExtensions.has(extname(filePath).toLowerCase());
}
/**
 * List git-tracked files under basePath via `git ls-files`.
 * Returns [] when git is unavailable, basePath is not a repository, or the
 * command exceeds the 10s timeout.
 */
function lsFiles(basePath) {
  try {
    // stdio: "pipe" keeps stderr out of the parent terminal: running sf
    // from a non-repo cwd (e.g. $HOME) would otherwise leak a
    // "fatal: not a git repository" line to the user before the catch
    // silently falls through to [].
    const stdout = execSync("git ls-files", {
      cwd: basePath,
      encoding: "utf-8",
      timeout: 10000,
      stdio: ["ignore", "pipe", "pipe"],
    });
    return stdout.split("\n").filter((line) => line.length > 0);
  } catch {
    // Best-effort: any failure yields an empty listing.
    return [];
  }
}
/**
 * Enumerate tracked files, applying exclusions and the maxFiles cap.
 * Returns the (possibly capped) file list plus a flag recording whether
 * the cap actually dropped anything.
 */
function enumerateFiles(basePath, excludes, maxFiles) {
  const kept = [];
  for (const file of lsFiles(basePath)) {
    if (!shouldExclude(file, excludes)) kept.push(file);
  }
  if (kept.length > maxFiles) {
    return { files: kept.slice(0, maxFiles), truncated: true };
  }
  return { files: kept, truncated: false };
}
/**
 * Merge caller options with module defaults and derive a stable signature
 * string. The signature participates in fingerprinting so that changed
 * options invalidate a previously generated map.
 */
function resolveGeneratorOptions(options) {
  const excludes = DEFAULT_EXCLUDES.concat(options?.excludePatterns ?? []);
  const maxFiles = options?.maxFiles ?? DEFAULT_MAX_FILES;
  const collapseThreshold =
    options?.collapseThreshold ?? DEFAULT_COLLAPSE_THRESHOLD;
  return {
    excludes,
    maxFiles,
    collapseThreshold,
    optionSignature: JSON.stringify({ excludes, maxFiles, collapseThreshold }),
  };
}
/**
 * sha1 over the file list, the truncation flag, and the resolved option
 * signature — any change in what the map would contain yields a new hash.
 */
function computeCodebaseFingerprint(files, resolved, truncated) {
  const payload = JSON.stringify({
    files,
    truncated,
    optionSignature: resolved.optionSignature,
  });
  return createHash("sha1").update(payload).digest("hex");
}
// ─── Grouping ────────────────────────────────────────────────────────────────
/**
 * Bucket files by containing directory (root-level files under ""), sort
 * both the directory list and each bucket, and mark buckets exceeding
 * collapseThreshold as collapsed.
 */
function groupByDirectory(files, descriptions, collapseThreshold) {
  const buckets = new Map();
  for (const path of files) {
    const parent = dirname(path);
    const key = parent === "." ? "" : parent;
    const entry = { path, description: descriptions.get(path) ?? "" };
    const bucket = buckets.get(key);
    if (bucket) {
      bucket.push(entry);
    } else {
      buckets.set(key, [entry]);
    }
  }
  return [...buckets.keys()].sort().map((dir) => {
    const entries = buckets
      .get(dir)
      .sort((a, b) => a.path.localeCompare(b.path));
    return {
      path: dir,
      files: entries,
      collapsed: entries.length > collapseThreshold,
    };
  });
}
/** True when fileName is tracked at the repo root or inside any directory. */
function hasFile(files, fileName) {
  const suffix = `/${fileName}`;
  return files.some((file) => file === fileName || file.endsWith(suffix));
}
/** True when any tracked file lives under dirName, at any nesting depth. */
function hasDir(files, dirName) {
  const prefix = dirName.endsWith("/") ? dirName : `${dirName}/`;
  const nested = `/${prefix}`;
  for (const file of files) {
    if (file.startsWith(prefix) || file.includes(nested)) return true;
  }
  return false;
}
/** True when any tracked file's lowercased extension appears in `extensions`. */
function hasExt(files, extensions) {
  const lookup = new Set(extensions);
  for (const file of files) {
    if (lookup.has(extname(file).toLowerCase())) return true;
  }
  return false;
}
/**
 * Heuristic: does any tracked path look like a test by common naming
 * conventions? Covers test/spec directories, JS/TS *.test.* and *.spec.*,
 * Go *_test.go, pytest test_*.py, and RSpec *_spec.rb.
 */
function hasTestFile(files) {
  const conventions = [
    /(^|\/)(test|tests|spec|__tests__)(\/|$)/i,
    /\.(test|spec)\.[cm]?[jt]sx?$/i,
    /_test\.go$/i,
    /test_.*\.py$/i,
    /_spec\.rb$/i,
  ];
  return files.some((file) => conventions.some((re) => re.test(file)));
}
/** Append value to target unless already present (preserves insertion order). */
function pushUnique(target, value) {
  if (target.includes(value)) return;
  target.push(value);
}
/**
 * Heuristically infer project knowledge from the tracked file list alone.
 *
 * Pure filename/manifest pattern matching — no file contents are read, so
 * every signal here is a hint to verify, not a fact. Returns five
 * bullet-list arrays consumed by renderProjectKnowledge(): stackSignals,
 * criticalPathHints, verificationCommands, skillNeeds, and knowledgeGaps.
 * All but knowledgeGaps get a fallback entry when nothing was detected;
 * knowledgeGaps always ends with two standing reminders.
 */
function inferProjectKnowledge(files) {
  const stackSignals = [];
  const criticalPathHints = [];
  const verificationCommands = [];
  const skillNeeds = [];
  const knowledgeGaps = [];
  // ── Runtime / language manifests ──
  if (hasFile(files, "package.json")) {
    pushUnique(stackSignals, "Node.js package manifest present");
    pushUnique(
      verificationCommands,
      "npm test or the package.json test script",
    );
    if (hasFile(files, "tsconfig.json") || hasExt(files, [".ts", ".tsx"])) {
      pushUnique(stackSignals, "TypeScript source detected");
      pushUnique(skillNeeds, "TypeScript/Node project maintenance");
    } else {
      pushUnique(skillNeeds, "JavaScript/Node project maintenance");
    }
  }
  if (hasFile(files, "go.mod")) {
    pushUnique(stackSignals, "Go module present");
    pushUnique(verificationCommands, "go test ./...");
    pushUnique(skillNeeds, "Go service development and testing");
  }
  if (hasFile(files, "Cargo.toml")) {
    pushUnique(stackSignals, "Rust crate/workspace manifest present");
    pushUnique(verificationCommands, "cargo test");
    pushUnique(skillNeeds, "Rust implementation and ownership review");
  }
  if (hasFile(files, "pyproject.toml") || hasFile(files, "requirements.txt")) {
    // Distinguish package manager so the agent gets accurate context for
    // what `pytest` and friends should be prefixed with (uv run / poetry run).
    const pyManager = hasFile(files, "uv.lock")
      ? "uv-managed"
      : hasFile(files, "poetry.lock")
        ? "poetry-managed"
        : hasFile(files, "pdm.lock")
          ? "pdm-managed"
          : hasFile(files, "pyproject.toml")
            ? "pip/pyproject-managed"
            : "pip/requirements-managed";
    pushUnique(stackSignals, `Python project (${pyManager})`);
    // Surface configured Python tools so the agent knows what verification
    // stack actually exists. Config-file presence is the cheap signal;
    // for [tool.X] sections in pyproject.toml see detection.pyprojectHasTool.
    const pyTools = [];
    if (hasFile(files, "ruff.toml") || hasFile(files, ".ruff.toml")) {
      pyTools.push("ruff");
    }
    if (hasFile(files, "mypy.ini") || hasFile(files, ".mypy.ini")) {
      pyTools.push("mypy");
    }
    if (hasFile(files, "pyrightconfig.json")) {
      pyTools.push("pyright");
    }
    if (pyTools.length > 0) {
      pushUnique(
        stackSignals,
        `Python tooling configured: ${pyTools.join(", ")}`,
      );
    }
    pushUnique(
      verificationCommands,
      "pytest or the project quality command (lint + type + test stack from .sf/preferences.yaml)",
    );
    pushUnique(skillNeeds, "Python packaging, typing, and tests");
  }
  // ── Infrastructure / deployment signals ──
  if (
    hasFile(files, "Dockerfile") ||
    hasFile(files, "docker-compose.yml") ||
    hasFile(files, "compose.yaml")
  ) {
    pushUnique(stackSignals, "Container/runtime configuration present");
    pushUnique(skillNeeds, "Containerized runtime and deployment review");
  }
  if (
    hasFile(files, "flake.nix") ||
    hasDir(files, "nix") ||
    hasDir(files, "nixos")
  ) {
    pushUnique(stackSignals, "Nix/NixOS configuration present");
    pushUnique(skillNeeds, "Nix build and deployment review");
  }
  if (
    hasDir(files, "migrations") ||
    hasDir(files, "db") ||
    hasDir(files, "database") ||
    hasExt(files, [".sql"])
  ) {
    pushUnique(stackSignals, "Database schema or migration files present");
    pushUnique(skillNeeds, "Database migration and persistence review");
    pushUnique(
      criticalPathHints,
      "Database migrations and persistence code need schema/runtime alignment checks",
    );
  }
  // ── Likely implementation boundaries (conventional source dirs) ──
  for (const dir of [
    "src",
    "app",
    "cmd",
    "internal",
    "pkg",
    "server",
    "services",
    "packages",
  ]) {
    if (hasDir(files, dir)) {
      pushUnique(
        criticalPathHints,
        `${dir}/ is a likely implementation boundary to map before planning`,
      );
    }
  }
  if (
    hasDir(files, "api") ||
    hasDir(files, "routes") ||
    hasDir(files, "handlers")
  ) {
    pushUnique(
      criticalPathHints,
      "API/handler directories exist; trace request paths end-to-end before changing behavior",
    );
  }
  if (hasDir(files, "scripts")) {
    pushUnique(
      criticalPathHints,
      "scripts/ may contain repo-owned build, test, or deploy entrypoints",
    );
  }
  if (hasDir(files, "docs")) {
    pushUnique(
      criticalPathHints,
      "docs/ may contain product or architecture decisions that constrain roadmap scope",
    );
  }
  // ── Quality gates & knowledge gaps ──
  if (hasTestFile(files)) {
    pushUnique(
      criticalPathHints,
      "Tracked tests exist; map coverage against the primary user/runtime flows",
    );
  } else {
    pushUnique(
      knowledgeGaps,
      "No tracked test files detected by filename convention; verify actual quality gates before planning",
    );
  }
  if (
    !hasDir(files, ".github/workflows") &&
    !hasFile(files, ".gitlab-ci.yml") &&
    !hasFile(files, "Jenkinsfile")
  ) {
    pushUnique(
      knowledgeGaps,
      "No common CI workflow file detected; identify the authoritative quality command",
    );
  }
  if (!hasFile(files, "README.md") && !hasFile(files, "README.rst")) {
    pushUnique(
      knowledgeGaps,
      "No README detected; infer product intent from code, docs, or user-provided specification",
    );
  }
  if (stackSignals.length === 0) {
    pushUnique(
      knowledgeGaps,
      "No common runtime manifest detected; inspect entrypoints manually before planning",
    );
  }
  // Standing reminders — always appended regardless of what was detected.
  pushUnique(
    knowledgeGaps,
    "Fill descriptions for active milestone files after reading them, not from filenames alone",
  );
  pushUnique(
    knowledgeGaps,
    "Record verified runtime boundaries, external services, data stores, and missing skills before final CONTEXT.md",
  );
  // Fallback entries keep every rendered section non-empty.
  return {
    stackSignals: stackSignals.length
      ? stackSignals
      : ["No stack signals inferred from common manifests"],
    criticalPathHints: criticalPathHints.length
      ? criticalPathHints
      : ["Map entrypoints manually; no common source directories detected"],
    verificationCommands: verificationCommands.length
      ? verificationCommands
      : ["Identify and run the repo-owned quality/test command"],
    skillNeeds: skillNeeds.length
      ? skillNeeds
      : [
          "General codebase exploration skill; add domain-specific skills after stack discovery",
        ],
    knowledgeGaps,
  };
}
/**
 * Append the "## Project Knowledge" markdown section to `lines` in place.
 * Renders five fixed sub-sections as bullet lists from `knowledge`.
 */
function renderProjectKnowledge(lines, knowledge) {
  lines.push(
    "## Project Knowledge",
    "",
    "Generated orientation scaffold. SF should enrich these sections with verified findings before promoting milestone context.",
    "",
  );
  const sections = [
    ["Stack Signals", knowledge.stackSignals],
    ["Critical Paths To Investigate", knowledge.criticalPathHints],
    ["Verification Commands To Prove", knowledge.verificationCommands],
    ["Skill Needs", knowledge.skillNeeds],
    ["Knowledge Gaps To Close", knowledge.knowledgeGaps],
  ];
  for (const [heading, items] of sections) {
    lines.push(`### ${heading}`, ...items.map((item) => `- ${item}`), "");
  }
}
// ─── Rendering ───────────────────────────────────────────────────────────────
/**
 * Render the full CODEBASE.md text from grouped files plus metadata.
 *
 * BUG FIX: described entries must be emitted as "- `path` — description"
 * (with the " — " separator) so parseCodebaseMap() can round-trip them.
 * The previous code emitted "- `path`description", which matches neither of
 * parseCodebaseMap's entry regexes, so every description was silently lost
 * on the next incremental update. Fixed in both the normal listing and the
 * hidden collapsed-descriptions block.
 */
function renderCodebaseMap(groups, totalFiles, truncated, metadata, files) {
  const lines = [];
  const described = groups.reduce(
    (sum, g) => sum + g.files.filter((f) => f.description).length,
    0,
  );
  lines.push("# Codebase Map");
  lines.push("");
  lines.push(
    `Generated: ${metadata.generatedAt} | Files: ${totalFiles} | Described: ${described}/${totalFiles}`,
  );
  // Machine-readable metadata for freshness checks (hidden HTML comment).
  lines.push(`${CODEBASE_METADATA_PREFIX}${JSON.stringify(metadata)} -->`);
  if (truncated) {
    lines.push(
      `Note: Truncated to first ${totalFiles} files. Run with higher --max-files to include all.`,
    );
  }
  lines.push("");
  renderProjectKnowledge(lines, inferProjectKnowledge(files));
  lines.push("## File Map");
  lines.push("");
  for (const group of groups) {
    const heading = group.path || "(root)";
    lines.push(`### ${heading}/`);
    if (group.collapsed) {
      // Summarize collapsed directories by per-extension counts.
      const extensions = new Map();
      for (const f of group.files) {
        const ext = extname(f.path) || "(no ext)";
        extensions.set(ext, (extensions.get(ext) ?? 0) + 1);
      }
      const extSummary = [...extensions.entries()]
        .sort((a, b) => b[1] - a[1])
        .map(([ext, count]) => `${count} ${ext}`)
        .join(", ");
      lines.push(`- *(${group.files.length} files: ${extSummary})*`);
      // Preserve any existing descriptions in a hidden comment block so
      // incremental updates can recover them via parseCodebaseMap.
      const descLines = group.files
        .filter((f) => f.description)
        .map((f) => `- \`${f.path}\` — ${f.description}`);
      if (descLines.length > 0) {
        lines.push("<!-- sf:collapsed-descriptions");
        lines.push(...descLines);
        lines.push("-->");
      }
    } else {
      for (const file of group.files) {
        if (file.description) {
          // " — " separator is required by parseCodebaseMap's entry regex.
          lines.push(`- \`${file.path}\` — ${file.description}`);
        } else {
          lines.push(`- \`${file.path}\``);
        }
      }
    }
    lines.push("");
  }
  return lines.join("\n");
}
/**
 * Assemble map content + metadata for basePath.
 *
 * Accepts an optional pre-enumerated file listing so callers that already
 * ran enumerateFiles() avoid a second `git ls-files` pass (keeps content
 * and stats consistent).
 */
function buildCodebaseMap(basePath, resolved, existingDescriptions, enumerated) {
  const listed =
    enumerated ??
    enumerateFiles(basePath, resolved.excludes, resolved.maxFiles);
  const groups = groupByDirectory(
    listed.files,
    existingDescriptions ?? new Map(),
    resolved.collapseThreshold,
  );
  // Second-resolution ISO timestamp: strip fractional seconds, keep the Z.
  const generatedAt = `${new Date().toISOString().split(".")[0]}Z`;
  const fingerprint = computeCodebaseFingerprint(
    listed.files,
    resolved,
    listed.truncated,
  );
  const metadata = {
    generatedAt,
    fingerprint,
    fileCount: listed.files.length,
    truncated: listed.truncated,
  };
  return {
    content: renderCodebaseMap(
      groups,
      listed.files.length,
      listed.truncated,
      metadata,
      listed.files,
    ),
    fileCount: listed.files.length,
    truncated: listed.truncated,
    files: listed.files,
    fingerprint,
    generatedAt,
  };
}
// ─── Public API ──────────────────────────────────────────────────────────────
/**
 * Generate a fresh CODEBASE.md from scratch.
 * Preserves existing descriptions when `existingDescriptions` is provided.
 */
export function generateCodebaseMap(basePath, options, existingDescriptions) {
  return buildCodebaseMap(
    basePath,
    resolveGeneratorOptions(options),
    existingDescriptions,
  );
}
/**
 * Incremental update: re-scan tracked files, preserve descriptions already
 * recorded in .sf/CODEBASE.md, add new files, and drop deleted ones.
 * Returns the new content plus added/removed/unchanged counters.
 */
export function updateCodebaseMap(basePath, options) {
  const resolved = resolveGeneratorOptions(options);
  const mapPath = join(sfRoot(basePath), "CODEBASE.md");
  const previousDescriptions = existsSync(mapPath)
    ? parseCodebaseMap(readFileSync(mapPath, "utf-8"))
    : new Map();
  const previousFiles = new Set(previousDescriptions.keys());
  // Rebuild while preserving descriptions — reuse the returned file list
  // to avoid a second enumeration (prevents race between content and stats).
  const result = buildCodebaseMap(basePath, resolved, previousDescriptions);
  const currentFiles = new Set(result.files);
  const added = result.files.filter((f) => !previousFiles.has(f)).length;
  let removed = 0;
  for (const f of previousFiles) {
    if (!currentFiles.has(f)) removed += 1;
  }
  return {
    content: result.content,
    added,
    removed,
    unchanged: result.files.length - added,
    fileCount: result.fileCount,
    truncated: result.truncated,
    fingerprint: result.fingerprint,
    generatedAt: result.generatedAt,
  };
}
/** Drop every freshness-cache entry belonging to basePath (any option set). */
function clearFreshnessCache(basePath) {
  const scopedPrefix = `${basePath}::`;
  for (const key of [...freshnessCache.keys()]) {
    if (key === basePath || key.startsWith(scopedPrefix)) {
      freshnessCache.delete(key);
    }
  }
}
/**
 * Ensure .sf/CODEBASE.md exists and reflects the current tracked file set.
 *
 * Flow: consult the per-process TTL cache, re-enumerate tracked files,
 * compare against the metadata embedded in the existing map, then
 * generate, update, or leave the map alone. Maps with no embedded metadata
 * are never auto-overwritten (treated as manually maintained).
 *
 * @param ensureOptions optional { ttlMs, maxAgeMs, force } overrides.
 * @returns { status: "fresh"|"generated"|"updated"|"empty", fileCount,
 *            truncated, generatedAt, fingerprint, reason? }
 */
export function ensureCodebaseMapFresh(basePath, options, ensureOptions) {
  const resolved = resolveGeneratorOptions(options);
  const cacheKey = `${basePath}::${resolved.optionSignature}`;
  const ttlMs = ensureOptions?.ttlMs ?? DEFAULT_REFRESH_TTL_MS;
  const maxAgeMs = ensureOptions?.maxAgeMs ?? DEFAULT_MAX_AGE_MS;
  const force = ensureOptions?.force === true;
  const now = Date.now();
  // Serve a recent verdict from the in-process cache unless forced.
  if (!force && ttlMs > 0) {
    const cached = freshnessCache.get(cacheKey);
    if (cached && now - cached.checkedAt < ttlMs) {
      return cached.result;
    }
  }
  const existing = readCodebaseMap(basePath);
  const listed = enumerateFiles(basePath, resolved.excludes, resolved.maxFiles);
  const fingerprint = computeCodebaseFingerprint(
    listed.files,
    resolved,
    listed.truncated,
  );
  // Every exit path records its result in the freshness cache.
  const cacheAndReturn = (result) => {
    freshnessCache.set(cacheKey, { checkedAt: now, result });
    return result;
  };
  // No map yet: generate one, unless the repo has no tracked files at all.
  if (!existing) {
    const generated = buildCodebaseMap(basePath, resolved, undefined, listed);
    if (generated.fileCount > 0) {
      writeCodebaseMap(basePath, generated.content);
      return cacheAndReturn({
        status: "generated",
        fileCount: generated.fileCount,
        truncated: generated.truncated,
        generatedAt: generated.generatedAt,
        fingerprint: generated.fingerprint,
        reason: "missing",
      });
    }
    return cacheAndReturn({
      status: "empty",
      fileCount: 0,
      truncated: false,
      generatedAt: null,
      fingerprint,
      reason: "no-tracked-files",
    });
  }
  const metadata = parseCodebaseMapMetadata(existing);
  const existingDescriptions = parseCodebaseMap(existing);
  const ageMs = metadata
    ? now - Date.parse(metadata.generatedAt)
    : Number.POSITIVE_INFINITY;
  // First matching staleness cause wins; undefined means the map is fresh.
  const staleReason = !metadata
    ? undefined // no metadata = manually maintained by research agent, never auto-overwrite
    : metadata.fingerprint !== fingerprint
      ? "files-changed"
      : metadata.fileCount !== listed.files.length
        ? "file-count-changed"
        : metadata.truncated !== listed.truncated
          ? "truncation-changed"
          : maxAgeMs > 0 && Number.isFinite(ageMs) && ageMs > maxAgeMs
            ? "expired"
            : undefined;
  if (!staleReason) {
    return cacheAndReturn({
      status: "fresh",
      fileCount: metadata?.fileCount ?? listed.files.length,
      truncated: metadata?.truncated ?? listed.truncated,
      generatedAt: metadata?.generatedAt ?? null,
      fingerprint: metadata?.fingerprint ?? fingerprint,
    });
  }
  // Stale: rebuild, carrying existing descriptions forward.
  const updated = buildCodebaseMap(
    basePath,
    resolved,
    existingDescriptions,
    listed,
  );
  if (updated.fileCount > 0) {
    writeCodebaseMap(basePath, updated.content);
    return cacheAndReturn({
      status: "updated",
      fileCount: updated.fileCount,
      truncated: updated.truncated,
      generatedAt: updated.generatedAt,
      fingerprint: updated.fingerprint,
      reason: staleReason,
    });
  }
  // All tracked files vanished since the map was written.
  return cacheAndReturn({
    status: "empty",
    fileCount: 0,
    truncated: false,
    generatedAt: null,
    fingerprint,
    reason: staleReason,
  });
}
/**
 * Write CODEBASE.md into the .sf/ directory (created if missing) and
 * invalidate cached freshness results for this basePath.
 * Returns the absolute output path.
 */
export function writeCodebaseMap(basePath, content) {
  const root = sfRoot(basePath);
  mkdirSync(root, { recursive: true });
  const target = join(root, "CODEBASE.md");
  writeFileSync(target, content, "utf-8");
  clearFreshnessCache(basePath);
  return target;
}
/**
 * Read .sf/CODEBASE.md, or return null when it is missing or unreadable.
 */
export function readCodebaseMap(basePath) {
  const mapPath = join(sfRoot(basePath), "CODEBASE.md");
  if (!existsSync(mapPath)) return null;
  try {
    return readFileSync(mapPath, "utf-8");
  } catch {
    // Raced deletion or permission failure — treat the map as absent.
    return null;
  }
}
/**
 * Summarize the current codebase map: existence, total file count (read
 * from the header line, accurate even for collapsed directories),
 * described vs undescribed counts, and the generation timestamp.
 */
export function getCodebaseMapStats(basePath) {
  const content = readCodebaseMap(basePath);
  if (!content) {
    return {
      exists: false,
      fileCount: 0,
      describedCount: 0,
      undescribedCount: 0,
      generatedAt: null,
    };
  }
  // The header carries the authoritative total (collapsed dirs hide entries).
  const fileCount = Number.parseInt(
    content.match(/Files:\s*(\d+)/)?.[1] ?? "0",
    10,
  );
  // parseCodebaseMap also harvests collapsed-description comment blocks.
  const describedCount = [...parseCodebaseMap(content).values()].filter(
    (d) => d.length > 0,
  ).length;
  return {
    exists: true,
    fileCount,
    describedCount,
    undescribedCount: fileCount - describedCount,
    generatedAt: content.match(/Generated: (\S+)/)?.[1] ?? null,
  };
}