feat(dev-server): auto-reload on SF extension + coding-agent + git upgrades

Before: dev-server watched packages/daemon/src + dev scripts + package.json.
SF extension source edits in src/resources/extensions/sf/ AND coding-agent
edits in packages/coding-agent/src/ did NOT trigger restart. Operators had to
restart manually after copy-resources / git pull / coding-agent edits.

Adds three watched paths:

1. packages/coding-agent/src — rpc-mode hosts sf_feedback / start_autonomous
   handlers, lives here. Edits must restart the sf child.

2. dist/resources/.sf-resource-build-stamp — atomic stamp updated by
   copy-resources. Watching the stamp (not the dist tree) avoids heavy
   recursive walk while picking up extension upgrades the moment they land.
   Idempotent: ensure-source-resources only updates the stamp when an actual
   rebuild ran, so identical re-runs do not trigger a restart loop.

3. .git/HEAD — changes on pull / branch switch / commit. Catches upgrade
   flows where source moved outside this process.

Native (packages/native/) intentionally not watched — Rust build is 5–10 min,
auto-trigger would loop. Operator triggers native rebuild manually per the
existing ensure-source-resources policy.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Mikael Hugo 2026-05-17 21:03:49 +02:00
parent 1ac2527b36
commit 70d89eebec
3 changed files with 190 additions and 0 deletions

View file

@ -39,10 +39,24 @@ const watchEnabled =
// Paths whose modification must restart the dev server. Expressed as
// path-segment tuples relative to the repo root and resolved in one pass,
// so each entry stays greppable without repeating `resolve(root, ...)`.
const watchedRoots = [
  ["packages", "daemon", "src"],
  ["packages", "daemon", "package.json"],
  // coding-agent hosts the rpc-mode child the server spawns; edits here
  // must trigger a restart to make sf_feedback / start_autonomous handlers
  // reflect new code.
  ["packages", "coding-agent", "src"],
  ["scripts", "dev-server.js"],
  ["scripts", "copy-resources.cjs"],
  ["scripts", "ensure-source-resources.cjs"],
  ["package.json"],
  // SF extension dist stamp — copy-resources updates this atomically once
  // src/resources/extensions/sf changes are built. Watching the stamp file
  // (not the dist tree) avoids a heavy recursive walk while still picking
  // up extension upgrades the moment they land.
  ["dist", "resources", ".sf-resource-build-stamp"],
  // Git HEAD — changes on pull / branch switch / commit. Catches "upgrade"
  // flows where source code changed outside this process (operator pulled
  // a remote update). Combined with the build stamp, this closes the
  // "automatic upgrade detection" gap without git polling.
  [".git", "HEAD"],
].map((segments) => resolve(root, ...segments));
function newestMtimeMs(path) {

View file

@ -0,0 +1,93 @@
import assert from "node:assert/strict";
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { afterEach, test } from "vitest";
import { finalizeRunawayGuardFailure } from "../auto-timers.js";
import { readUnitRuntimeRecord } from "../uok/unit-runtime.js";
// Temp directories created by makeProject; drained after every test so
// repeated runs leave no residue under the OS temp dir.
const tmpRoots = [];
afterEach(() => {
  // Drain front-to-back — same removal order as iterating splice(0).
  while (tmpRoots.length > 0) {
    rmSync(tmpRoots.shift(), { recursive: true, force: true });
  }
});
/**
 * Create a throwaway project root under the OS temp dir and register it
 * in tmpRoots so afterEach tears it down.
 * @returns {string} absolute path of the fresh directory
 */
function makeProject() {
  const prefix = join(tmpdir(), "sf-auto-timers-");
  const projectRoot = mkdtempSync(prefix);
  tmpRoots.push(projectRoot);
  return projectRoot;
}
// End-to-end check of the runaway-guard failure path for a "zero-progress"
// worker. Asserts that finalizeRunawayGuardFailure: marks the on-disk unit
// runtime record failed, fails the lineage and moves the worker session to
// the failed list, nulls all four timer handles on the session state,
// blocks the offending model, records a self-feedback entry, resolves a
// synthetic agent-end event, and emits an error-level notification.
test("zero_progress_fail_closes_lineage_clears_timers_and_unblocks_unit", async () => {
  const root = makeProject();
  const startedAt = Date.now();
  // Live timer handles — the finalizer is expected to clear these and set
  // the corresponding fields on `s` to null.
  const handles = {
    unitTimeoutHandle: setTimeout(() => {}, 60_000),
    wrapupWarningHandle: setTimeout(() => {}, 60_000),
    idleWatchdogHandle: setInterval(() => {}, 60_000),
    continueHereHandle: setInterval(() => {}, 60_000),
  };
  // Minimal session-state stub: base path, current unit + model, timers.
  const s = {
    basePath: root,
    currentUnit: { type: "challenge", id: "M048/S04/challenge", startedAt },
    currentUnitModel: { provider: "minimax", id: "MiniMax-M2.7" },
    ...handles,
  };
  // Capture sinks for each collaborator call the finalizer makes.
  const messages = [];
  const blocked = [];
  const feedback = [];
  const resolved = [];
  await finalizeRunawayGuardFailure(
    {
      s,
      unitType: "challenge",
      unitId: "M048/S04/challenge",
      buildSnapshotOpts: () => ({ traceId: "trace-1" }),
      ctx: {
        sessionManager: { getSessionId: () => "worker-session-1" },
        ui: {
          notify(message, level) {
            messages.push({ message, level });
          },
        },
      },
    },
    {
      reason: "zero-progress",
      metadata: { zeroProgress: true },
    },
    // Injected collaborators; each records its invocation for assertions.
    {
      async closeoutUnit() {},
      blockModel(basePath, provider, id, reason) {
        blocked.push({ basePath, provider, id, reason });
      },
      recordSelfFeedback(entry) {
        feedback.push(entry);
      },
      resolveAgentEnd(event) {
        resolved.push(event);
      },
    },
  );
  // Runtime record on disk reflects the silent-worker failure...
  const record = readUnitRuntimeRecord(root, "challenge", "M048/S04/challenge");
  assert.equal(record.status, "failed");
  assert.equal(record.phase, "failed-silent-worker");
  // ...and the lineage is failed with the worker session moved from
  // "current" into the failed list.
  assert.equal(record.lineage.status, "failed");
  assert.equal(record.lineage.currentWorkerSessionId, null);
  assert.deepEqual(record.lineage.failedWorkerSessionIds, ["worker-session-1"]);
  // All four timer handle fields must be nulled on the session state.
  assert.equal(s.unitTimeoutHandle, null);
  assert.equal(s.wrapupWarningHandle, null);
  assert.equal(s.idleWatchdogHandle, null);
  assert.equal(s.continueHereHandle, null);
  // The model that produced zero progress is blocked exactly once.
  assert.equal(blocked.length, 1);
  assert.equal(blocked[0].provider, "minimax");
  // Exactly one self-feedback entry, tagged as a silent-worker failure.
  assert.equal(feedback.length, 1);
  assert.equal(feedback[0].kind, "runaway-loop:silent-worker-failure");
  // A synthetic agent-end event is resolved for downstream consumers.
  assert.equal(resolved.length, 1);
  assert.equal(resolved[0]._synthetic, "runaway-guard-fail");
  // Operator-visible notification at error level.
  assert.ok(messages.some((m) => m.level === "error"));
});

View file

@ -0,0 +1,83 @@
import assert from "node:assert/strict";
import {
mkdirSync,
mkdtempSync,
readFileSync,
rmSync,
writeFileSync,
} from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { parse } from "yaml";
import { afterEach, test } from "vitest";
import { setExperimentalFlag } from "../experimental.js";
import { parsePreferencesYaml } from "../preferences-loader.js";
// Snapshot process-global state (cwd, env) so each test may mutate it
// freely; afterEach restores both and removes any temp trees created by
// makeProject.
const originalCwd = process.cwd();
const originalEnv = { ...process.env };
const tmpRoots = [];
afterEach(() => {
  // Restore process globals first, then delete temp directories
  // front-to-back (same removal order as iterating splice(0)).
  process.chdir(originalCwd);
  process.env = { ...originalEnv };
  while (tmpRoots.length > 0) {
    rmSync(tmpRoots.shift(), { recursive: true, force: true });
  }
});
/**
 * Build a throwaway project layout under the OS temp dir:
 *   <root>/project/.sf/preferences.yaml  (seeded with `content`)
 *   <root>/home/.sf/                     (fake HOME for the loader)
 * Points HOME / SF_HOME at the fake home and chdirs into the project;
 * afterEach restores cwd/env and removes the tree.
 * @param {string} content - YAML body written to preferences.yaml
 * @returns {string} absolute path of the project directory
 */
function makeProject(content) {
  const tempRoot = mkdtempSync(join(tmpdir(), "sf-experimental-"));
  tmpRoots.push(tempRoot);
  const project = join(tempRoot, "project");
  const home = join(tempRoot, "home");
  for (const sfDir of [join(project, ".sf"), join(home, ".sf")]) {
    mkdirSync(sfDir, { recursive: true });
  }
  writeFileSync(join(project, ".sf", "preferences.yaml"), content, "utf-8");
  process.env.HOME = home;
  process.env.SF_HOME = join(home, ".sf");
  process.chdir(project);
  return project;
}
// Regression test: setExperimentalFlag must rewrite preferences.yaml as a
// single YAML document (no `---` frontmatter markers anywhere) while
// flipping the flag and preserving the trailing human-readable comment
// block ("# SF Preferences" ... reference pointer).
test("setExperimentalFlag_writes_single_yaml_document_without_frontmatter_markers", () => {
  // Seed file: machine YAML up top, prose-style comment section below.
  const project = makeProject(
    [
      "version: 1",
      "experimental:",
      " smoke_gate: true",
      "",
      "# SF Preferences",
      "",
      "See `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full documentation.",
      "",
    ].join("\n"),
  );
  setExperimentalFlag("smoke_gate", false);
  const written = readFileSync(
    join(project, ".sf", "preferences.yaml"),
    "utf-8",
  );
  // No frontmatter: neither a leading "---" nor any "---" line at all.
  assert.equal(written.startsWith("---"), false);
  assert.equal((written.match(/^---$/gm) ?? []).length, 0);
  // The flag was actually flipped and the file still parses as YAML.
  assert.equal(parse(written).experimental.smoke_gate, false);
  // The human-readable section survived the rewrite.
  assert.match(written, /# SF Preferences/);
});
// A legacy preferences file that mixes machine YAML with a raw reference
// body must still yield the machine-readable flags.
test("parsePreferencesYaml_when_legacy_raw_reference_body_exists_reads_machine_yaml", () => {
  const legacyBody = [
    "version: 1",
    "experimental:",
    " smoke_gate: false",
    "",
    "# SF Preferences",
    "",
    "See `~/.sf/agent/extensions/sf/docs/preferences-reference.md` for full documentation.",
    "",
  ].join("\n");
  const parsed = parsePreferencesYaml(legacyBody);
  assert.equal(parsed.experimental.smoke_gate, false);
});