From 58b1d7c60116988834616638f8a050e3622cc940 Mon Sep 17 00:00:00 2001 From: Mikael Hugo Date: Wed, 29 Apr 2026 14:22:31 +0200 Subject: [PATCH] port(pi-mono): omit tools field instead of sending empty array (refs 3e0ee69b5) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pi-mono Tier 0 #2 — sf-driven port of PR #3650. Some LLM providers reject API calls when `tools: []` is sent (an empty array), but accept the call when the tools field is omitted entirely. This changes each provider's request-body builder to omit `tools` when the tool list is empty, instead of serialising the empty array. Files (5 provider builders): - packages/pi-ai/src/providers/openai-completions.ts - packages/pi-ai/src/providers/openai-responses.ts - packages/pi-ai/src/providers/openai-codex-responses.ts - packages/pi-ai/src/providers/azure-openai-responses.ts - packages/pi-ai/src/providers/anthropic-shared.ts (covers anthropic and anthropic-vertex, both of which import buildParams from it) Pattern: `if (context.tools)` → `if (context.tools && context.tools.length > 0)`. Preserved: the `else if (hasToolHistory(context.messages))` branch in openai-completions.ts, which intentionally emits `tools: []` for LiteLLM/Anthropic-proxy compatibility, is left unchanged. Type-check passes. 
Co-Authored-By: sf v2.75.1 (session 38ed0a48) Co-Authored-By: Claude Sonnet 4.6 --- packages/pi-ai/src/providers/anthropic-shared.ts | 2 +- packages/pi-ai/src/providers/azure-openai-responses.ts | 2 +- packages/pi-ai/src/providers/openai-codex-responses.ts | 2 +- packages/pi-ai/src/providers/openai-completions.ts | 2 +- packages/pi-ai/src/providers/openai-responses.ts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/pi-ai/src/providers/anthropic-shared.ts b/packages/pi-ai/src/providers/anthropic-shared.ts index 342c3fa11..1e8f78f9b 100644 --- a/packages/pi-ai/src/providers/anthropic-shared.ts +++ b/packages/pi-ai/src/providers/anthropic-shared.ts @@ -473,7 +473,7 @@ export function buildParams( params.temperature = options.temperature; } - if (context.tools) { + if (context.tools && context.tools.length > 0) { params.tools = convertTools(context.tools, isOAuthToken, cacheControl); } diff --git a/packages/pi-ai/src/providers/azure-openai-responses.ts b/packages/pi-ai/src/providers/azure-openai-responses.ts index 9dac639d4..7067f728b 100644 --- a/packages/pi-ai/src/providers/azure-openai-responses.ts +++ b/packages/pi-ai/src/providers/azure-openai-responses.ts @@ -217,7 +217,7 @@ function buildParams( params.temperature = options?.temperature; } - if (context.tools) { + if (context.tools && context.tools.length > 0) { params.tools = convertResponsesTools(context.tools); } diff --git a/packages/pi-ai/src/providers/openai-codex-responses.ts b/packages/pi-ai/src/providers/openai-codex-responses.ts index fa130515b..9ff27b767 100644 --- a/packages/pi-ai/src/providers/openai-codex-responses.ts +++ b/packages/pi-ai/src/providers/openai-codex-responses.ts @@ -312,7 +312,7 @@ function buildRequestBody( body.temperature = options.temperature; } - if (context.tools) { + if (context.tools && context.tools.length > 0) { body.tools = convertResponsesTools(context.tools, { strict: null }); } diff --git 
a/packages/pi-ai/src/providers/openai-completions.ts b/packages/pi-ai/src/providers/openai-completions.ts index a67f9219d..cdeabccbf 100644 --- a/packages/pi-ai/src/providers/openai-completions.ts +++ b/packages/pi-ai/src/providers/openai-completions.ts @@ -344,7 +344,7 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio params.temperature = options.temperature; } - if (context.tools) { + if (context.tools && context.tools.length > 0) { params.tools = convertTools(context.tools, compat); maybeAddOpenRouterAnthropicToolCacheControl(model, params.tools); } else if (hasToolHistory(context.messages)) { diff --git a/packages/pi-ai/src/providers/openai-responses.ts b/packages/pi-ai/src/providers/openai-responses.ts index d0e6222af..3e00f576c 100644 --- a/packages/pi-ai/src/providers/openai-responses.ts +++ b/packages/pi-ai/src/providers/openai-responses.ts @@ -156,7 +156,7 @@ function buildParams(model: Model<"openai-responses">, context: Context, options params.service_tier = options.serviceTier; } - if (context.tools) { + if (context.tools && context.tools.length > 0) { params.tools = convertResponsesTools(context.tools); }