fix: make doctor repair sf form drift
This commit is contained in:
parent
87d49abd87
commit
8fd48a5ad6
4 changed files with 153 additions and 6 deletions
|
|
@ -23,9 +23,7 @@
|
|||
clippy
|
||||
git
|
||||
just
|
||||
nodejs_24
|
||||
pkg-config
|
||||
nodePackages.typescript
|
||||
protobuf
|
||||
rust-analyzer
|
||||
rustc
|
||||
|
|
@ -36,6 +34,12 @@
|
|||
|
||||
shellHook = ''
|
||||
export SF_SOURCE_DIR="${toString ./.}"
|
||||
if [ -x "$HOME/.local/bin/mise" ]; then
|
||||
MISE_NODE_BIN="$("$HOME/.local/bin/mise" which node 2>/dev/null || true)"
|
||||
if [ -n "$MISE_NODE_BIN" ]; then
|
||||
export PATH="$(dirname "$MISE_NODE_BIN"):$PATH"
|
||||
fi
|
||||
fi
|
||||
export PATH="$SF_SOURCE_DIR/bin:$PATH"
|
||||
export RUST_BACKTRACE=1
|
||||
|
||||
|
|
|
|||
|
|
@ -312,8 +312,12 @@ export async function checkRuntimeHealth(
|
|||
// ── Gitignore drift ───────────────────────────────────────────────────
|
||||
try {
|
||||
const gitignorePath = join(basePath, ".gitignore");
|
||||
const excludePath = join(basePath, ".git", "info", "exclude");
|
||||
if (existsSync(gitignorePath) && nativeIsRepo(basePath)) {
|
||||
const content = readFileSync(gitignorePath, "utf-8");
|
||||
const content = [
|
||||
readFileSync(gitignorePath, "utf-8"),
|
||||
existsSync(excludePath) ? readFileSync(excludePath, "utf-8") : "",
|
||||
].join("\n");
|
||||
const existingLines = new Set(
|
||||
content
|
||||
.split("\n")
|
||||
|
|
@ -334,8 +338,8 @@ export async function checkRuntimeHealth(
|
|||
code: "gitignore_missing_patterns",
|
||||
scope: "project",
|
||||
unitId: "project",
|
||||
message: `${missing.length} critical SF runtime pattern(s) missing from .gitignore: ${missing.join(", ")}`,
|
||||
file: ".gitignore",
|
||||
message: `${missing.length} critical SF runtime pattern(s) missing from .gitignore or .git/info/exclude: ${missing.join(", ")}`,
|
||||
file: ".git/info/exclude",
|
||||
fixable: true,
|
||||
});
|
||||
if (shouldFix("gitignore_missing_patterns")) {
|
||||
|
|
|
|||
|
|
@ -203,6 +203,68 @@ function parseMarkdownFrontmatter(content) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
 * Re-quote YAML frontmatter list items whose scalar values break parsing
 * (embedded colons, or values accidentally split across multiple lines).
 *
 * Each `- value` item is collected together with its continuation lines
 * (lines that are neither a top-level `key:` line, nor a sibling list item
 * at the same indent, nor blank), joined with a literal `\n` escape, and
 * emitted as a JSON-quoted scalar (valid YAML double-quoted style).
 *
 * Fix: the repair is now idempotent. A single-line item that is already a
 * valid JSON string (e.g. the output of a previous repair pass) is left
 * untouched instead of being re-stringified into a double-escaped value
 * like `- "\"foo\""`.
 *
 * @param {string} frontmatter - raw frontmatter text (no `---` fences)
 * @returns {string} normalized frontmatter text
 */
function normalizeFrontmatterArrayScalars(frontmatter) {
  const lines = frontmatter.split("\n");
  const output = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const itemMatch = line.match(/^(\s*)-\s+(.+)$/);
    if (!itemMatch) {
      output.push(line);
      continue;
    }
    const indent = itemMatch[1];
    const continuation = [itemMatch[2]];
    // Absorb continuation lines until the next key, sibling item, or blank.
    while (i + 1 < lines.length) {
      const next = lines[i + 1];
      if (/^\S[^:]*:\s*/.test(next)) break;
      if (next.match(/^(\s*)-\s+(.+)$/)?.[1] === indent) break;
      if (next.trim() === "") break;
      continuation.push(next.trim());
      i++;
    }
    // Idempotence guard: a lone item that is already a valid JSON string
    // needs no repair — re-quoting it would double-escape the value.
    if (continuation.length === 1) {
      const scalar = continuation[0];
      if (scalar.startsWith('"') && scalar.endsWith('"')) {
        let alreadyQuoted = false;
        try {
          JSON.parse(scalar);
          alreadyQuoted = true;
        } catch {
          // Not a valid JSON string — fall through and re-quote.
        }
        if (alreadyQuoted) {
          output.push(line);
          continue;
        }
      }
    }
    output.push(`${indent}- ${JSON.stringify(continuation.join("\\n"))}`);
  }
  return output.join("\n");
}
|
||||
|
||||
/**
 * Attempt to repair a markdown file whose YAML frontmatter fails to parse.
 *
 * Normalizes line endings, extracts the frontmatter between the `---`
 * fences, runs the array-scalar normalizer, and validates the result with
 * the YAML parser before committing to it.
 *
 * @param {string} content - full markdown file content
 * @returns {string|null} repaired content, or null when no safe repair exists
 */
function repairMarkdownFrontmatter(content) {
  const hasFence = content.startsWith("---\n") || content.startsWith("---\r\n");
  if (!hasFence) {
    return null;
  }
  const normalized = content.replace(/\r\n/g, "\n");
  const fenceEnd = normalized.indexOf("\n---\n", 4);
  if (fenceEnd === -1) {
    return null;
  }
  const frontmatter = normalized.slice(4, fenceEnd);
  const repairedFrontmatter = normalizeFrontmatterArrayScalars(frontmatter);
  // Nothing changed → nothing to repair.
  if (repairedFrontmatter === frontmatter) {
    return null;
  }
  // Only accept the rewrite if the YAML parser agrees it is now valid.
  try {
    parseYaml(repairedFrontmatter);
  } catch {
    return null;
  }
  const body = normalized.slice(fenceEnd + 5);
  return `---\n${repairedFrontmatter}\n---\n${body}`;
}
|
||||
|
||||
/**
 * Repair a .jsonl file that was written as a single pretty-printed JSON
 * document instead of one compact JSON value per line.
 *
 * @param {string} content - raw file content
 * @returns {string|null} a single compact JSON line (newline-terminated),
 *   or null when the content is empty, unparseable, or not an object/array.
 */
function repairJsonl(content) {
  if (!content.trim()) return null;
  let parsed;
  try {
    parsed = JSON.parse(content);
  } catch {
    // Not a single valid JSON document — no automatic repair possible.
    return null;
  }
  const isObjectLike = parsed !== null && typeof parsed === "object";
  return isObjectLike ? `${JSON.stringify(parsed)}\n` : null;
}
|
||||
|
||||
/**
 * Dispatch a broken .sf form file to the repairer for its extension.
 *
 * @param {string} ext - file extension including the dot (e.g. ".jsonl")
 * @param {string} content - raw file content
 * @returns {string|null} repaired content, or null when unrepairable
 */
function repairSfFormContent(ext, content) {
  switch (ext) {
    case ".jsonl":
      return repairJsonl(content);
    case ".md":
      return repairMarkdownFrontmatter(content);
    default:
      return null;
  }
}
|
||||
|
||||
function checkSfFormSyntax(basePath, issues, fixesApplied, shouldFix) {
|
||||
const root = sfRoot(basePath);
|
||||
for (const relPath of collectSfFormFiles(basePath)) {
|
||||
|
|
@ -229,6 +291,8 @@ function checkSfFormSyntax(basePath, issues, fixesApplied, shouldFix) {
|
|||
parseError = error instanceof Error ? error.message : String(error);
|
||||
}
|
||||
if (parseError) {
|
||||
const repaired = repairSfFormContent(ext, content);
|
||||
const repairable = repaired !== null;
|
||||
issues.push({
|
||||
severity: "error",
|
||||
code: "invalid_sf_form",
|
||||
|
|
@ -236,8 +300,13 @@ function checkSfFormSyntax(basePath, issues, fixesApplied, shouldFix) {
|
|||
unitId: "project",
|
||||
message: `.sf/${relPath} has invalid ${ext.slice(1) || "form"} syntax: ${parseError}`,
|
||||
file: `.sf/${relPath}`,
|
||||
fixable: false,
|
||||
fixable: repairable,
|
||||
});
|
||||
if (repairable && shouldFix("invalid_sf_form")) {
|
||||
writeFileSync(filePath, repaired, "utf-8");
|
||||
content = repaired;
|
||||
fixesApplied.push(`repaired .sf form syntax in .sf/${relPath}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
|
|
|
|||
|
|
@ -79,4 +79,74 @@ describe("doctor .sf form lint", () => {
|
|||
"# SF guidance\n\nSF owns this scaffold.\n",
|
||||
);
|
||||
});
|
||||
|
||||
// A pretty-printed JSON object in a .jsonl slot should be compacted onto
// a single line by `doctor --fix`.
test("runSFDoctor_fix_repairs_json_object_written_as_jsonl", async () => {
  const projectDir = makeProject();
  const targetPath = join(projectDir, ".sf", "self-feedback-resolved.jsonl");
  const brokenJsonl =
    '{\n "id": "sf-test",\n "resolvedAt": "2026-05-05T00:00:00.000Z"\n}\n';
  writeFileSync(targetPath, brokenJsonl, "utf-8");

  const doctorReport = await runSFDoctor(projectDir, {
    fix: true,
    fixLevel: "all",
    scope: "project",
  });

  // The fix must be reported and the file rewritten as compact JSONL.
  assert.ok(
    doctorReport.fixesApplied.includes(
      "repaired .sf form syntax in .sf/self-feedback-resolved.jsonl",
    ),
  );
  assert.equal(
    readFileSync(targetPath, "utf-8"),
    '{"id":"sf-test","resolvedAt":"2026-05-05T00:00:00.000Z"}\n',
  );
});
|
||||
|
||||
// Frontmatter list items containing colons or accidental line breaks should
// be rewritten as quoted YAML scalars by `doctor --fix`.
test("runSFDoctor_fix_quotes_frontmatter_array_scalars_with_colons_and_newlines", async () => {
  const projectDir = makeProject();
  const summaryPath = join(projectDir, ".sf", "milestones", "M001", "SUMMARY.md");
  mkdirSync(join(projectDir, ".sf", "milestones", "M001"), {
    recursive: true,
  });
  const brokenSummary = [
    "---",
    "id: M001",
    "key_decisions:",
    " - Fixed test with clear: false and clear: true cases.",
    " - Empty files contain one newline (not `[]",
    "`) so split works.",
    "---",
    "# Summary",
    "",
  ].join("\n");
  writeFileSync(summaryPath, brokenSummary, "utf-8");

  const doctorReport = await runSFDoctor(projectDir, {
    fix: true,
    fixLevel: "all",
    scope: "project",
  });

  assert.ok(
    doctorReport.fixesApplied.includes(
      "repaired .sf form syntax in .sf/milestones/M001/SUMMARY.md",
    ),
  );
  const repairedContent = readFileSync(summaryPath, "utf-8");
  // Colon-bearing scalar is quoted; split scalar is merged with a \n escape.
  assert.match(
    repairedContent,
    / {2}- "Fixed test with clear: false and clear: true cases\."/,
  );
  assert.match(
    repairedContent,
    / {2}- "Empty files contain one newline .*\\\\n.* so split works\."/,
  );
});
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue