fix: align scaffold sync and gemini listings
parent 66e8265320
commit 180f8e131e
9 changed files with 313 additions and 37 deletions
```diff
@@ -1,12 +0,0 @@
-{
-  "last_session_id": "67e970c5-7790-4d38-ba0b-527b9f349c49",
-  "last_event_key": "67e970c5-7790-4d38-ba0b-527b9f349c49:transcript:4fb7e8afb9c1c96fda3a464c707cde5137eba863cb21384f4d929867c14d1d9a",
-  "last_prompted_session_id": "",
-  "last_reason": "short-session",
-  "last_prompted_at": "",
-  "last_user_message_count": 0,
-  "last_actionable_message_count": 0,
-  "deep_interview_lock_active": false,
-  "deep_interview_lock_source": "/home/mhugo/code/singularity-forge/.omg/state/deep-interview.json",
-  "updated_at": "2026-05-04T20:36:06.661Z"
-}
```
```diff
@@ -719,6 +719,7 @@ async function generateModels() {
     ...aiGatewayModels,
   ].filter(
     (model) =>
+      !model.id.endsWith("-customtools") &&
       !(
         (model.provider === "opencode" || model.provider === "opencode-go") &&
         model.id === "gpt-5.3-codex-spark"
```
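The added condition reads as: keep a model unless its id carries the `-customtools` suffix, or it is the `gpt-5.3-codex-spark` listing under either opencode provider. A standalone sketch of that predicate (the `ModelEntry` shape and sample rows are invented for illustration):

```ts
interface ModelEntry {
  id: string;
  provider: string;
}

// Hypothetical catalog rows standing in for the generated model list.
const candidates: ModelEntry[] = [
  { id: "gemini-3.1-pro-preview", provider: "google" },
  { id: "gemini-3.1-pro-preview-customtools", provider: "google" },
  { id: "gpt-5.3-codex-spark", provider: "opencode" },
  { id: "gpt-5.3-codex-spark", provider: "other" },
];

const visible = candidates.filter(
  (model) =>
    !model.id.endsWith("-customtools") &&
    !(
      (model.provider === "opencode" || model.provider === "opencode-go") &&
      model.id === "gpt-5.3-codex-spark"
    ),
);

// Keeps "google/gemini-3.1-pro-preview" and "other/gpt-5.3-codex-spark":
// the suffix rule is global, the spark rule only targets opencode providers.
console.log(visible.map((m) => `${m.provider}/${m.id}`));
```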
```diff
@@ -18,6 +18,26 @@ import type { Api, Model } from "./types.js";
 // ═══════════════════════════════════════════════════════════════════════════
 
+describe("model registry — custom providers", () => {
+  it("hides Gemini customtools variants from the runtime registry", () => {
+    const googleModels = getModels("google").map((model) => model.id);
+    const geminiCliModels = getModels("google-gemini-cli").map(
+      (model) => model.id,
+    );
+
+    assert.equal(
+      googleModels.some((id) => id.endsWith("-customtools")),
+      false,
+    );
+    assert.equal(
+      geminiCliModels.some((id) => id.endsWith("-customtools")),
+      false,
+    );
+    assert.equal(
+      getModel("google" as any, "gemini-3.1-pro-preview-customtools" as any),
+      undefined,
+    );
+  });
 
 it("alibaba-coding-plan is a registered provider", () => {
   const providers = getProviders();
   assert.ok(
```
```diff
@@ -10,10 +10,15 @@ import type {
 
 const modelRegistry: Map<string, Map<string, Model<Api>>> = new Map();
 
+function isHiddenBuiltInModelId(id: string): boolean {
+  return id.endsWith("-customtools");
+}
+
 // Initialize registry from auto-generated MODELS (models.dev catalog)
 for (const [provider, models] of Object.entries(MODELS)) {
   const providerModels = new Map<string, Model<Api>>();
   for (const [id, model] of Object.entries(models)) {
+    if (isHiddenBuiltInModelId(id)) continue;
     providerModels.set(id, model as Model<Api>);
   }
   modelRegistry.set(provider, providerModels);
```
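Because the skip happens during registry initialization, hidden ids never enter the maps, so every read path is filtered for free. A minimal sketch with a made-up two-entry catalog (`MODELS` here is a stand-in, not the generated module):

```ts
// Hypothetical catalog: one visible model, one hidden variant.
const MODELS: Record<string, Record<string, { name: string }>> = {
  google: {
    "gemini-3.1-pro-preview": { name: "Gemini 3.1 Pro Preview" },
    "gemini-3.1-pro-preview-customtools": { name: "hidden variant" },
  },
};

function isHiddenBuiltInModelId(id: string): boolean {
  return id.endsWith("-customtools");
}

const registry = new Map<string, Map<string, { name: string }>>();
for (const [provider, models] of Object.entries(MODELS)) {
  const providerModels = new Map<string, { name: string }>();
  for (const [id, model] of Object.entries(models)) {
    if (isHiddenBuiltInModelId(id)) continue; // hidden ids never enter the map
    providerModels.set(id, model);
  }
  registry.set(provider, providerModels);
}

console.log(registry.get("google")?.size); // 1
console.log(registry.get("google")?.has("gemini-3.1-pro-preview-customtools")); // false
```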
```diff
@@ -5,6 +5,8 @@ import type { Context, Model } from "../types.js";
 const geminiCliCore = vi.hoisted(() => ({
   retryError: undefined as Error | undefined,
   retryOptions: undefined as Record<string, unknown> | undefined,
+  fakeConfigParams: undefined as Record<string, unknown> | undefined,
+  generatorAuthType: undefined as unknown,
 }));
 
 vi.mock("@google/gemini-cli-core", () => ({
@@ -15,7 +17,10 @@ vi.mock("@google/gemini-cli-core", () => ({
       }
     },
   getOauthClient: vi.fn(async () => ({})),
-  makeFakeConfig: vi.fn(() => ({})),
+  makeFakeConfig: vi.fn((params: Record<string, unknown>) => {
+    geminiCliCore.fakeConfigParams = params;
+    return { params };
+  }),
   retryWithBackoff: vi.fn(
     async (_fn: unknown, options: Record<string, unknown>) => {
       geminiCliCore.retryOptions = options;
@@ -25,6 +30,18 @@ vi.mock("@google/gemini-cli-core", () => ({
   setupUser: vi.fn(async () => ({ projectId: "test-project" })),
 }));
 
+vi.mock("@google/gemini-cli-core/dist/src/core/contentGenerator.js", () => ({
+  createContentGeneratorConfig: vi.fn(async (_config, authType) => {
+    geminiCliCore.generatorAuthType = authType;
+    return { authType };
+  }),
+  createContentGenerator: vi.fn(async () => ({
+    async generateContentStream(): Promise<AsyncGenerator<unknown>> {
+      return (async function* emptyStream() {})();
+    },
+  })),
+}));
+
 import { streamGoogleGeminiCli } from "./google-gemini-cli.js";
 
 function makeModel(): Model<"google-gemini-cli"> {
@@ -65,6 +82,12 @@ describe("google-gemini-cli provider retry ownership", () => {
     | { maxAttempts?: unknown }
     | undefined;
   assert.equal(retryOptions?.maxAttempts, 1);
+  assert.equal(
+    geminiCliCore.fakeConfigParams?.model,
+    "gemini-3-flash-preview",
+  );
+  assert.equal(geminiCliCore.fakeConfigParams?.clientName, undefined);
+  assert.equal(geminiCliCore.generatorAuthType, "LOGIN_WITH_GOOGLE");
   assert.equal(result.stopReason, "error");
   assert.match(result.errorMessage ?? "", /exhausted your capacity/i);
   assert.equal(result.retryAfterMs, 54_000);
```
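These mocks lean on a standard Vitest idiom: `vi.mock` factories are hoisted above imports, so any mutable state they capture must be created with `vi.hoisted`. A stripped-down sketch of the same capture trick, using `node:fs` as the mocked module rather than the real cli-core surface:

```ts
import { expect, it, vi } from "vitest";

// Created via vi.hoisted so the hoisted vi.mock factory below can close over it.
const captured = vi.hoisted(() => ({ lastPath: undefined as unknown }));

vi.mock("node:fs", () => ({
  readFileSync: vi.fn((path: unknown) => {
    captured.lastPath = path; // record what the code under test asked for
    return "stub contents";
  }),
}));

it("exposes arguments passed into the mocked module", async () => {
  const { readFileSync } = await import("node:fs");
  expect(readFileSync("/etc/hosts", "utf-8")).toBe("stub contents");
  expect(captured.lastPath).toBe("/etc/hosts");
});
```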
|
@ -14,8 +14,11 @@ import {
|
|||
makeFakeConfig,
|
||||
retryWithBackoff,
|
||||
} from "@google/gemini-cli-core";
|
||||
import { createCodeAssistContentGenerator } from "@google/gemini-cli-core/dist/src/code_assist/codeAssist.js";
|
||||
import type { ContentGenerator } from "@google/gemini-cli-core/dist/src/core/contentGenerator.js";
|
||||
import {
|
||||
createContentGenerator,
|
||||
createContentGeneratorConfig,
|
||||
} from "@google/gemini-cli-core/dist/src/core/contentGenerator.js";
|
||||
import type {
|
||||
Content,
|
||||
GenerateContentParameters,
|
||||
|
|
@ -98,26 +101,27 @@ export interface GoogleGeminiCliOptions extends StreamOptions {
|
|||
let toolCallCounter = 0;
|
||||
|
||||
/**
|
||||
* Build a Code Assist content generator using cli-core's own auth + project discovery.
|
||||
* Build a Code Assist content generator using cli-core's official content-generator path.
|
||||
*
|
||||
* - getOauthClient() reads ~/.gemini/oauth_creds.json when present, refreshes if
|
||||
* expired. cli-core owns any interactive login flow it needs.
|
||||
* - setupUser() asks the Code Assist API for the project + tier tied to this
|
||||
* identity (free-tier auto-provisioned if needed; otherwise whatever the
|
||||
* user has been onboarded to server-side).
|
||||
* - createCodeAssistContentGenerator() passes the returned tier and paid-tier
|
||||
* data into CodeAssistServer, matching the official Gemini CLI path.
|
||||
* Upstream Gemini CLI does not instantiate CodeAssistServer directly from the
|
||||
* caller. It creates a ContentGeneratorConfig, lets createContentGenerator()
|
||||
* build the GeminiCLI User-Agent and transport headers, then delegates to
|
||||
* createCodeAssistContentGenerator() for OAuth, setupUser(), and Code Assist.
|
||||
*
|
||||
* Both calls memoize internally inside cli-core — repeat invocations are
|
||||
* cheap.
|
||||
*/
|
||||
async function getCodeAssistServer(): Promise<ContentGenerator> {
|
||||
const config = makeFakeConfig();
|
||||
return createCodeAssistContentGenerator(
|
||||
{ headers: {} },
|
||||
AuthType.LOGIN_WITH_GOOGLE,
|
||||
async function getCodeAssistServer(modelId: string): Promise<ContentGenerator> {
|
||||
const config = makeFakeConfig({
|
||||
model: modelId,
|
||||
cwd: process.cwd(),
|
||||
targetDir: process.cwd(),
|
||||
});
|
||||
const generatorConfig = await createContentGeneratorConfig(
|
||||
config,
|
||||
AuthType.LOGIN_WITH_GOOGLE,
|
||||
);
|
||||
return createContentGenerator(generatorConfig, config);
|
||||
}
|
||||
|
||||
function parseDurationMs(value: string): number | undefined {
|
||||
|
|
@ -213,14 +217,14 @@ export const streamGoogleGeminiCli: StreamFunction<
|
|||
};
|
||||
|
||||
try {
|
||||
// cli-core handles auth + project discovery. SF uses cli-core directly
|
||||
// and does not spawn a separate provider CLI process.
|
||||
const server = await getCodeAssistServer();
|
||||
let req = buildRequest(model, context, options);
|
||||
const nextReq = await options?.onPayload?.(req, model);
|
||||
if (nextReq !== undefined) {
|
||||
req = nextReq as GenerateContentParameters;
|
||||
}
|
||||
// cli-core handles auth + project discovery. SF uses cli-core directly
|
||||
// and does not spawn a separate provider CLI process.
|
||||
const server = await getCodeAssistServer(req.model);
|
||||
const promptId = `pi-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
|
||||
// Cast through `any` — cli-core bundles its own nested @google/genai copy,
|
||||
// so TypeScript sees two structurally-identical-but-distinct Content types.
|
||||
|
|
|
|||
|
|
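Moving the `getCodeAssistServer` call below the `onPayload` hook is the point of the last hunk: the hook may replace the request, including its `model`, so the generator config has to be built from the final value. A restatement of that ordering contract (hook and request shapes simplified from the diff, the redirected model id is illustrative):

```ts
type Req = { model: string };

// Hypothetical hook that redirects every request to a different model.
const onPayload = async (req: Req): Promise<Req | undefined> => ({
  ...req,
  model: "gemini-3.1-pro-preview",
});

let req: Req = { model: "gemini-3-flash-preview" };
const next = await onPayload(req);
if (next !== undefined) {
  req = next;
}

// Only now is the generator built, so it sees the post-hook model id:
// const server = await getCodeAssistServer(req.model);
console.log(req.model); // "gemini-3.1-pro-preview"
```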
```diff
@@ -2,12 +2,16 @@ import {
   existsSync,
   mkdirSync,
   readdirSync,
+  readFileSync,
   rmdirSync,
   rmSync,
   writeFileSync,
 } from "node:fs";
 import { dirname, join } from "node:path";
-import { migrateLegacyScaffold } from "./scaffold-drift.js";
+import {
+  migrateLegacyGsdScaffold,
+  migrateLegacyScaffold,
+} from "./scaffold-drift.js";
 import {
   bodyHash,
   extractMarker,
@@ -29,6 +33,13 @@ const LEGACY_ROOT_HARNESS_PATHS = [
   "harness/evals/AGENTS.md",
   "harness/graders/AGENTS.md",
 ];
+const LEGACY_ROOT_HARNESS_TARGETS = {
+  "harness/AGENTS.md": ".sf/harness/AGENTS.md",
+  "harness/specs/AGENTS.md": ".sf/harness/specs/AGENTS.md",
+  "harness/specs/bootstrap.md": ".sf/harness/specs/bootstrap.md",
+  "harness/evals/AGENTS.md": ".sf/harness/evals/AGENTS.md",
+  "harness/graders/AGENTS.md": ".sf/harness/graders/AGENTS.md",
+};
 /**
  * Canonical scaffold file templates SF manages for agent legibility.
  *
@@ -501,16 +512,34 @@ function pruneEmptyDir(path) {
   }
 }
 
+function scaffoldContentForPath(relPath) {
+  return SCAFFOLD_FILES.find((file) => file.path === relPath)?.content ?? null;
+}
+
+function isUnmarkedGeneratedRootHarness(relPath, body) {
+  const currentRelPath = LEGACY_ROOT_HARNESS_TARGETS[relPath];
+  const currentContent = currentRelPath
+    ? scaffoldContentForPath(currentRelPath)
+    : null;
+  if (!currentContent) return false;
+  return body.trim() === currentContent.trim();
+}
+
 function removeLegacyRootHarnessScaffold(basePath) {
   for (const relPath of LEGACY_ROOT_HARNESS_PATHS) {
     const target = join(basePath, relPath);
     try {
       if (!existsSync(target)) continue;
       const { marker, body } = extractMarker(target);
-      if (!marker) continue;
-      if (marker.template !== relPath) continue;
-      if (marker.state !== "pending") continue;
-      if (bodyHash(body) !== marker.hash) continue;
+      if (marker) {
+        if (marker.template !== relPath) continue;
+        if (marker.state !== "pending") continue;
+        if (bodyHash(body) !== marker.hash) continue;
+      } else if (
+        !isUnmarkedGeneratedRootHarness(relPath, readFileSync(target, "utf-8"))
+      ) {
+        continue;
+      }
       rmSync(target);
     } catch (err) {
       logWarning("scaffold", "failed to remove legacy root harness file", {
```
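Restated, the removal rule now has two legs: stamped files are deleted only when template, `pending` state, and body hash all match; unstamped files are deleted only when their body is (whitespace-trimmed) identical to the current template under the legacy path's new `.sf/` target. A condensed sketch of that decision, with the types and hash function as stand-ins for the repo's helpers:

```ts
type Marker = { template: string; state: string; hash: string };

function shouldRemoveLegacyFile(
  relPath: string,
  marker: Marker | null,
  body: string,
  currentTemplate: string | null, // content shipped under the file's new .sf/ path
  hashOf: (s: string) => string,  // stand-in for bodyHash
): boolean {
  if (marker) {
    // Stamped: only delete what SF generated and the user never edited.
    return (
      marker.template === relPath &&
      marker.state === "pending" &&
      hashOf(body) === marker.hash
    );
  }
  // Unstamped: only delete verbatim generated content.
  return currentTemplate !== null && body.trim() === currentTemplate.trim();
}
```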
```diff
@@ -554,6 +583,13 @@ export function ensureAgenticDocsScaffold(basePath) {
       error: err.message,
     });
   }
+  try {
+    migrateLegacyGsdScaffold(basePath);
+  } catch (err) {
+    logWarning("scaffold", "legacy GSD migration failed", {
+      error: err.message,
+    });
+  }
   removeLegacyRootHarnessScaffold(basePath);
   // Step 2: missing-file creation + pending-state silent upgrade.
   for (const file of SCAFFOLD_FILES) {
```
```diff
@@ -6,7 +6,7 @@
  * buckets. The result is structured and side-effect-free — Phase C wires
  * the report into the scaffold sync pipeline; Phase B is data-plane only.
  */
-import { existsSync, readFileSync } from "node:fs";
+import { existsSync, readFileSync, writeFileSync } from "node:fs";
 import { join } from "node:path";
 import { SCAFFOLD_FILES } from "./agentic-docs-scaffold.js";
 import {
@@ -48,8 +48,46 @@ function emptyCounts() {
     "editing-drift": 0,
     untracked: 0,
     current: 0,
+    customized: 0,
   };
 }
+
+const LEGACY_GSD_MANAGED_MARKER_RE = /<!--\s*\/?GSD:/i;
+
+/**
+ * Return true when a marker-less scaffold file still contains old managed GSD
+ * section markers.
+ *
+ * Purpose: let scaffold sync identify generated legacy guidance in projects
+ * that predate ADR-021's `sf-doc` marker without treating arbitrary prose as
+ * safe to rewrite.
+ *
+ * Consumer: detectScaffoldDrift and migrateLegacyGsdScaffold.
+ */
+export function hasLegacyGsdManagedMarkers(body) {
+  return LEGACY_GSD_MANAGED_MARKER_RE.test(body);
+}
+
+/**
+ * Rewrite legacy GSD-generated scaffold text to SF naming.
+ *
+ * Purpose: preserve repo-local custom prose while removing obsolete product
+ * names from sections that carry old managed GSD markers.
+ *
+ * Consumer: migrateLegacyGsdScaffold during startup and `/sf scaffold sync`.
+ */
+export function rewriteLegacyGsdManagedBody(body) {
+  return body
+    .replace(/<!--\s*GSD:/g, "<!-- SF:")
+    .replace(/<!--\s*\/GSD:/g, "<!-- /SF:")
+    .replace(/\.gsd\//g, ".sf/")
+    .replace(/\.gsd\b/g, ".sf")
+    .replace(/\/gsd-([a-z0-9-]+)/gi, "/sf $1")
+    .replace(/\bgsd\s+headless\s+auto\b/gi, "sf headless auto")
+    .replace(/\bgsd\s+auto\b/gi, "sf autonomous")
+    .replace(/\bGSD\b/g, "SF")
+    .replace(/\bgsd\b/g, "sf");
+}
 /**
  * Classify every `SCAFFOLD_FILES` entry against its on-disk state.
  *
```
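Concretely, the replacement chain maps GSD-era artifacts onto SF names while leaving surrounding prose untouched. A worked example (input invented, output traced through the replacements above):

```ts
const legacy = [
  "<!-- GSD:project-start source:PROJECT.md -->",
  "Run `gsd headless auto`, then `/gsd-plan`, and keep `.gsd/` state current.",
  "<!-- /GSD:project-end -->",
].join("\n");

// rewriteLegacyGsdManagedBody(legacy) yields:
//   <!-- SF:project-start source:PROJECT.md -->
//   Run `sf headless auto`, then `/sf plan`, and keep `.sf/` state current.
//   <!-- /SF:project-end -->
```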
```diff
@@ -127,6 +165,19 @@ export function detectScaffoldDrift(basePath) {
       body = "";
     }
   }
+  if (hasLegacyGsdManagedMarkers(body)) {
+    items.push({
+      path: file.path,
+      template: file.path,
+      bucket: "upgradable",
+      currentVersion: "legacy-gsd",
+      shipVersion,
+      hashDrifted: false,
+      legacyMigration: "gsd-managed-markers",
+    });
+    counts.upgradable += 1;
+    continue;
+  }
   items.push({
     path: file.path,
     template: file.path,
```
```diff
@@ -333,3 +384,62 @@ export function migrateLegacyScaffold(basePath) {
   }
   return { migrated, skipped };
 }
+
+/**
+ * Upgrade marker-less scaffold files that still carry legacy GSD managed
+ * section markers.
+ *
+ * Purpose: make `/sf scaffold sync` responsible for stale generated SF/GSD
+ * guidance without overwriting repo-specific content around those sections.
+ *
+ * Consumer: ensureAgenticDocsScaffold before normal missing-file and pending
+ * version refresh handling.
+ */
+export function migrateLegacyGsdScaffold(basePath) {
+  const shipVersion = process.env.SF_VERSION || "0.0.0";
+  const migrated = [];
+  const skipped = [];
+  const appliedAt = new Date().toISOString();
+  for (const file of SCAFFOLD_FILES) {
+    if (SKIP_MARKER_PATHS.has(file.path)) continue;
+    const target = join(basePath, file.path);
+    if (!existsSync(target)) continue;
+    let body;
+    let markerPresent = false;
+    try {
+      const extracted = extractMarker(target);
+      markerPresent = extracted.marker !== null;
+      body = extracted.body;
+    } catch (err) {
+      logWarning("scaffold", "failed to read file during GSD migration", {
+        file: file.path,
+        error: err.message,
+      });
+      skipped.push(file.path);
+      continue;
+    }
+    if (markerPresent) continue;
+    if (!hasLegacyGsdManagedMarkers(body)) continue;
+    const upgraded = rewriteLegacyGsdManagedBody(body);
+    try {
+      writeFileSync(target, upgraded, "utf-8");
+      stampScaffoldFile(target, file.path, shipVersion, "completed");
+      recordScaffoldApply(basePath, {
+        path: file.path,
+        template: file.path,
+        version: shipVersion,
+        appliedAt,
+        stateAtApply: "completed",
+        contentHash: bodyHash(upgraded),
+      });
+      migrated.push(file.path);
+    } catch (err) {
+      logWarning("scaffold", "failed to upgrade legacy GSD scaffold file", {
+        file: file.path,
+        error: err.message,
+      });
+      skipped.push(file.path);
+    }
+  }
+  return { migrated, skipped };
+}
```
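Wired into `ensureAgenticDocsScaffold` above, a startup pass over a GSD-era repo would then report something like this (return shape taken from the code, values illustrative):

```ts
import { migrateLegacyGsdScaffold } from "./scaffold-drift.js";

// Hypothetical invocation from a project root that still has GSD-era docs.
const { migrated, skipped } = migrateLegacyGsdScaffold("/path/to/repo");

console.log(migrated); // e.g. ["AGENTS.md"] - rewritten, stamped "completed", recorded
console.log(skipped);  // e.g. [] - only populated when a read or write fails
```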
```diff
@@ -10,7 +10,8 @@ import { tmpdir } from "node:os";
 import { dirname, join } from "node:path";
 import { afterEach, test } from "vitest";
 import { ensureAgenticDocsScaffold } from "../agentic-docs-scaffold.js";
-import { stampScaffoldFile } from "../scaffold-versioning.js";
+import { detectScaffoldDrift } from "../scaffold-drift.js";
+import { extractMarker, stampScaffoldFile } from "../scaffold-versioning.js";
 
 const tmpRoots = [];
 
@@ -42,3 +43,91 @@ test("ensureAgenticDocsScaffold_removes_owned_root_harness_and_writes_sf_harness
   assert.equal(existsSync(join(root, "harness")), false);
   assert.equal(existsSync(join(root, ".sf/harness/specs/bootstrap.md")), true);
 });
+
+test("detectScaffoldDrift_when_unmarked_gsd_managed_section_marks_upgradable", () => {
+  const root = makeProject();
+  const target = join(root, "AGENTS.md");
+  writeFileSync(
+    target,
+    [
+      "# Local Agents",
+      "",
+      "<!-- GSD:project-start source:PROJECT.md -->",
+      "Run `gsd headless auto` and keep `.gsd/` state current.",
+      "<!-- /GSD:project-end -->",
+      "",
+    ].join("\n"),
+    "utf-8",
+  );
+
+  const item = detectScaffoldDrift(root).items.find(
+    (candidate) => candidate.path === "AGENTS.md",
+  );
+
+  assert.equal(item.bucket, "upgradable");
+  assert.equal(item.legacyMigration, "gsd-managed-markers");
+});
+
+test("ensureAgenticDocsScaffold_rewrites_and_stamps_legacy_gsd_managed_section", () => {
+  const root = makeProject();
+  const target = join(root, "AGENTS.md");
+  writeFileSync(
+    target,
+    [
+      "# Local Agents",
+      "",
+      "Keep this repo note.",
+      "<!-- GSD:project-start source:PROJECT.md -->",
+      "Run `gsd headless auto` and keep `.gsd/` state current.",
+      "<!-- /GSD:project-end -->",
+      "",
+    ].join("\n"),
+    "utf-8",
+  );
+
+  ensureAgenticDocsScaffold(root);
+
+  const { marker, body } = extractMarker(target);
+  assert.equal(marker?.state, "completed");
+  assert.equal(marker?.template, "AGENTS.md");
+  assert.match(body, /Keep this repo note\./);
+  assert.match(body, /<!-- SF:project-start source:PROJECT\.md -->/);
+  assert.match(body, /sf headless auto/);
+  assert.match(body, /\.sf\//);
+  assert.doesNotMatch(body, /GSD|gsd|\.gsd/);
+});
+
+test("ensureAgenticDocsScaffold_removes_unmarked_generated_root_harness", () => {
+  const root = makeProject();
+  const target = join(root, "harness/specs/bootstrap.md");
+  mkdirSync(dirname(target), { recursive: true });
+  writeFileSync(
+    target,
+    `# Bootstrap Spec: Agent Legibility
+
+Verifies that this repo is minimally agent-legible.
+
+## Criteria
+
+- [ ] \`AGENTS.md\` exists at repo root and is non-empty.
+- [ ] \`ARCHITECTURE.md\` exists at repo root and is non-empty.
+- [ ] \`docs/exec-plans/active/\` exists.
+- [ ] \`docs/exec-plans/tech-debt-tracker.md\` exists.
+- [ ] \`docs/design-docs/ADR-TEMPLATE.md\` exists.
+
+## Verification command
+
+\`\`\`bash
+for f in AGENTS.md ARCHITECTURE.md docs/exec-plans/active/index.md docs/exec-plans/tech-debt-tracker.md docs/design-docs/ADR-TEMPLATE.md .sf/harness/specs/bootstrap.md; do [ -s "$f" ] && echo "OK: $f" || echo "MISSING: $f"; done
+\`\`\`
+
+All lines should start with \`OK:\` for the bootstrap spec to pass.
+`,
+    "utf-8",
+  );
+
+  ensureAgenticDocsScaffold(root);
+
+  assert.equal(existsSync(join(root, "harness")), false);
+  assert.equal(existsSync(join(root, ".sf/harness/specs/bootstrap.md")), true);
+});
```