Cherry-pick 4 critical recovery fixes from pi-mono upstream

- agent-loop: wrap afterToolCall in try/catch so a throwing hook doesn't
  crash parallel tool batches (#3084)
- retry-handler: add "connection lost" to retryable error patterns (#3317)
- rpc-mode: redirect console.log to stderr to protect JSON stdout (#2388)
- openai-completions: ignore null/non-object chunks in stream (#2466)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Mikael Hugo 2026-04-18 14:28:15 +02:00
parent 28f0c91120
commit aff49e52aa
4 changed files with 34 additions and 20 deletions

View file

@ -753,6 +753,7 @@ async function finalizeExecutedToolCall(
let isError = executed.isError; let isError = executed.isError;
if (config.afterToolCall) { if (config.afterToolCall) {
try {
const afterResult = await config.afterToolCall( const afterResult = await config.afterToolCall(
{ {
assistantMessage, assistantMessage,
@ -771,6 +772,10 @@ async function finalizeExecutedToolCall(
}; };
isError = afterResult.isError !== undefined ? afterResult.isError : isError; isError = afterResult.isError !== undefined ? afterResult.isError : isError;
} }
} catch (error) {
result = createErrorToolResult(error instanceof Error ? error.message : String(error));
isError = true;
}
} }
return emitToolCallOutcome(prepared.toolCall, result, isError, stream); return emitToolCallOutcome(prepared.toolCall, result, isError, stream);

View file

@ -123,6 +123,8 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions", OpenA
}; };
for await (const chunk of openaiStream) { for await (const chunk of openaiStream) {
if (!chunk || typeof chunk !== "object") continue;
if (chunk.usage) { if (chunk.usage) {
const cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0; const cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0;
const reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0; const reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0;
@ -148,7 +150,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions", OpenA
calculateCost(model, output.usage); calculateCost(model, output.usage);
} }
const choice = chunk.choices?.[0]; const choice = Array.isArray(chunk.choices) ? chunk.choices[0] : undefined;
if (!choice) continue; if (!choice) continue;
if (choice.finish_reason) { if (choice.finish_reason) {

View file

@ -116,7 +116,7 @@ export class RetryHandler {
// generated error from getApiKey() when credentials are in a backoff window. // generated error from getApiKey() when credentials are in a backoff window.
// Re-entering the retry handler for that message creates a cascade of empty // Re-entering the retry handler for that message creates a cascade of empty
// error entries in the session file, breaking resume (#3429). // error entries in the session file, breaking resume (#3429).
return /overloaded|rate.?limit|too many requests|402|429|500|502|503|504|service.?unavailable|server.?error|internal.?error|connection.?error|connection.?refused|other side closed|fetch failed|upstream.?connect|reset before headers|terminated|retry delay|network.?(?:is\s+)?unavailable|credentials.*expired|requires more credits|can only afford|insufficient credits|not enough credits|extra usage is required|(?:out of|no) extra usage|third.party.*draw from extra|third.party.*not.*available/i.test( return /overloaded|rate.?limit|too many requests|402|429|500|502|503|504|service.?unavailable|server.?error|internal.?error|connection.?error|connection.?refused|connection.?lost|other side closed|fetch failed|upstream.?connect|reset before headers|terminated|retry delay|network.?(?:is\s+)?unavailable|credentials.*expired|requires more credits|can only afford|insufficient credits|not enough credits|extra usage is required|(?:out of|no) extra usage|third.party.*draw from extra|third.party.*not.*available/i.test(
err, err,
); );
} }

View file

@ -50,8 +50,15 @@ export type {
* Listens for JSON commands on stdin, outputs events and responses on stdout. * Listens for JSON commands on stdin, outputs events and responses on stdout.
*/ */
export async function runRpcMode(session: AgentSession): Promise<never> { export async function runRpcMode(session: AgentSession): Promise<never> {
const rawStdoutWrite = process.stdout.write.bind(process.stdout);
const rawStderrWrite = process.stderr.write.bind(process.stderr);
process.stdout.write = ((
...args: Parameters<typeof process.stdout.write>
): ReturnType<typeof process.stdout.write> => rawStderrWrite(...args)) as typeof process.stdout.write;
const output = (obj: RpcResponse | RpcExtensionUIRequest | object) => { const output = (obj: RpcResponse | RpcExtensionUIRequest | object) => {
process.stdout.write(serializeJsonLine(obj)); rawStdoutWrite(serializeJsonLine(obj));
}; };
const success = <T extends RpcCommand["type"]>( const success = <T extends RpcCommand["type"]>(