fix: symlink extensions + silent catches masking real errors
Real bugs from 2nd-pass scan:
1. extension-registry.ts: discoverAllManifests skipped symlinked extension
dirs because Dirent.isDirectory() returns false for symlinks. Dev-workflow
symlinks under ~/.sf/agent/extensions/ were invisible to list/enable/
disable/info. Matches the regression documented in
symlink-extension-discovery.test.ts — the test inlines the correct logic,
but this callsite still had the buggy form. Now accepts isDirectory() ||
isSymbolicLink().
2. headless.ts SIGINT handler: client.stop() failures were double-silenced
(inner .catch(()=>{}), outer try{}catch{}). Interactive mode logs stop
errors to stderr. Restored interactive/headless parity — still fire-and-forget
(exit code is forced via process.exit) but failures are observable.
3. openai-codex-responses.ts SSE parser: malformed data frames were silently
dropped so broken streams looked identical to clean ones. Now debug-logs
the parse error with the chunk context so broken streams are
distinguishable in logs. Stream continues on bad chunk (one bad frame
shouldn't kill the whole generation).
4. web/cleanup-service.ts generated script: bare 'catch {}' around four native
git calls (nativeBranchList, nativeDetectMainBranch, nativeBranchListMerged,
nativeForEachRef). A failed main-branch detection silently left mainBranch at
its "main" fallback, so the subsequent merged-branch query could run against
the wrong branch with no trace of why. Now emits console.warn so failures
surface in the subprocess log.
5. web/undo-service.ts generated script: git revert failure was silenced;
when --no-commit failed, user saw commitsReverted=0 with no reason. Now
logs the revert error before attempting --abort (abort itself remains
best-effort silent).
False positives from the same scan (investigated and dismissed):
- auto-worktree.ts #2505: code uses ':(exclude).sf/milestones' pathspec +
shelter-and-restore, which is a better fix than the 'drop --include-untracked'
approach the test comment describes. Test comment is stale; source is correct.
- Lifecycle handler unhandled rejections across 5 extensions: extensions/runner.ts
already try/catches handler invocations and routes to emitError. Wrapping the
individual handlers would be redundant.
This commit is contained in:
parent
0f94341b43
commit
51b65fd490
5 changed files with 28 additions and 8 deletions
|
|
@ -438,7 +438,14 @@ async function* parseSSE(response: Response): AsyncGenerator<Record<string, unkn
|
|||
if (data && data !== "[DONE]") {
|
||||
try {
|
||||
yield JSON.parse(data);
|
||||
} catch {}
|
||||
} catch (err) {
|
||||
// Malformed SSE JSON chunk — surface at debug so broken streams are
|
||||
// distinguishable from clean ones in logs. Continue so one bad chunk
|
||||
// doesn't terminate the stream.
|
||||
console.debug(
|
||||
`[openai-codex-responses] SSE JSON parse failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
idx = buffer.indexOf("\n\n");
|
||||
|
|
|
|||
|
|
@ -182,7 +182,11 @@ export function discoverAllManifests(extensionsDir: string): Map<string, Extensi
|
|||
if (!existsSync(extensionsDir)) return manifests;
|
||||
|
||||
for (const entry of readdirSync(extensionsDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
// Accept both real directories and directory symlinks. Dirent.isDirectory()
|
||||
// returns false for symlinks even when they point at a directory, so dev-workflow
|
||||
// symlinked extensions under ~/.sf/agent/extensions/ are invisible otherwise
|
||||
// (regression tested in symlink-extension-discovery.test.ts).
|
||||
if (!entry.isDirectory() && !entry.isSymbolicLink()) continue;
|
||||
const manifest = readManifest(join(extensionsDir, entry.name));
|
||||
if (manifest) {
|
||||
manifests.set(manifest.id, manifest);
|
||||
|
|
|
|||
|
|
@ -749,7 +749,16 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number):
|
|||
// Kill child process — don't await, just fire and exit.
|
||||
// The main flow may be awaiting a promise that resolves when the child dies,
|
||||
// which would race with this handler. Exit synchronously to ensure correct exit code.
|
||||
try { client.stop().catch(() => {}) } catch {}
|
||||
// Log stop failures to stderr for head/headless parity — interactive-mode logs its
|
||||
// stop errors too. Exit code is already forced via process.exit, so logging is
|
||||
// purely observability and doesn't change shutdown semantics.
|
||||
try {
|
||||
client.stop().catch((err: unknown) => {
|
||||
process.stderr.write(`[headless] client.stop() rejected: ${err instanceof Error ? err.message : String(err)}\n`)
|
||||
})
|
||||
} catch (err) {
|
||||
process.stderr.write(`[headless] client.stop() threw: ${err instanceof Error ? err.message : String(err)}\n`)
|
||||
}
|
||||
if (timeoutTimer) clearTimeout(timeoutTimer)
|
||||
if (idleTimer) clearTimeout(idleTimer)
|
||||
// Emit batch JSON result if in json mode before exiting
|
||||
|
|
|
|||
|
|
@ -42,17 +42,17 @@ export async function collectCleanupData(projectCwdOverride?: string): Promise<C
|
|||
'const basePath = process.env.SF_CLEANUP_BASE;',
|
||||
// Get all SF branches
|
||||
'let branches = [];',
|
||||
'try { branches = mod.nativeBranchList(basePath, "sf/*"); } catch {}',
|
||||
'try { branches = mod.nativeBranchList(basePath, "sf/*"); } catch (e) { console.warn("[cleanup-service] nativeBranchList failed:", e && e.message ? e.message : e); }',
|
||||
// Detect main branch and find which SF branches are merged
|
||||
'let mainBranch = "main";',
|
||||
'try { mainBranch = mod.nativeDetectMainBranch(basePath); } catch {}',
|
||||
'try { mainBranch = mod.nativeDetectMainBranch(basePath); } catch (e) { console.warn("[cleanup-service] nativeDetectMainBranch failed — falling back to \\"main\\":", e && e.message ? e.message : e); }',
|
||||
'let merged = [];',
|
||||
'try { merged = mod.nativeBranchListMerged(basePath, mainBranch, "sf/*"); } catch {}',
|
||||
'try { merged = mod.nativeBranchListMerged(basePath, mainBranch, "sf/*"); } catch (e) { console.warn("[cleanup-service] nativeBranchListMerged failed:", e && e.message ? e.message : e); }',
|
||||
'const mergedSet = new Set(merged);',
|
||||
'const branchList = branches.map(b => ({ name: b, merged: mergedSet.has(b) }));',
|
||||
// Get snapshot refs
|
||||
'let refs = [];',
|
||||
'try { refs = mod.nativeForEachRef(basePath, "refs/sf/snapshots/"); } catch {}',
|
||||
'try { refs = mod.nativeForEachRef(basePath, "refs/sf/snapshots/"); } catch (e) { console.warn("[cleanup-service] nativeForEachRef failed:", e && e.message ? e.message : e); }',
|
||||
'const snapshotList = refs.map(r => {',
|
||||
' const parts = r.split(" ");',
|
||||
' return { ref: parts[0] || r, date: parts.length > 1 ? parts.slice(1).join(" ") : "" };',
|
||||
|
|
|
|||
|
|
@ -163,7 +163,7 @@ export async function executeUndo(projectCwdOverride?: string): Promise<UndoResu
|
|||
' const { execFileSync } = await import("node:child_process");',
|
||||
' for (const sha of commits.reverse()) {',
|
||||
' try { execFileSync("git", ["revert", "--no-commit", sha], { cwd: basePath, stdio: "pipe" }); commitsReverted++; }',
|
||||
' catch { try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, stdio: "pipe" }); } catch {} break; }',
|
||||
' catch (e) { console.warn("[undo-service] git revert failed for " + sha + ":", e && e.message ? e.message : e); try { execFileSync("git", ["revert", "--abort"], { cwd: basePath, stdio: "pipe" }); } catch {} break; }',
|
||||
' }',
|
||||
' }',
|
||||
'}',
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue