port(pi-mono): omit tools field instead of sending empty array (refs 3e0ee69b5)

Pi-mono Tier 0 #2 — sf-driven port of PR #3650.

Some LLM providers reject API calls when `tools: []` is sent (an empty
array), but accept the call when the tools field is omitted entirely.
This changes each provider's request-body builder to omit `tools` when
the tool list is empty, instead of serialising the empty array.

Files (5 provider builders):
- packages/pi-ai/src/providers/openai-completions.ts
- packages/pi-ai/src/providers/openai-responses.ts
- packages/pi-ai/src/providers/openai-codex-responses.ts
- packages/pi-ai/src/providers/azure-openai-responses.ts
- packages/pi-ai/src/providers/anthropic-shared.ts (covers anthropic
  and anthropic-vertex, which both import buildParams from it)

Pattern: `if (context.tools)` → `if (context.tools && context.tools.length > 0)`.

Preserved: the `else if (hasToolHistory(context.messages))` branch in
openai-completions.ts that intentionally emits `tools: []` for
LiteLLM/Anthropic-proxy compatibility is unchanged.

Type-check passes.

Co-Authored-By: sf v2.75.1 (session 38ed0a48)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Mikael Hugo 2026-04-29 14:22:31 +02:00
parent 701ec8fb88
commit 58b1d7c601
5 changed files with 5 additions and 5 deletions

View file

@@ -473,7 +473,7 @@ export function buildParams(
params.temperature = options.temperature;
}
if (context.tools) {
if (context.tools && context.tools.length > 0) {
params.tools = convertTools(context.tools, isOAuthToken, cacheControl);
}

View file

@@ -217,7 +217,7 @@ function buildParams(
params.temperature = options?.temperature;
}
if (context.tools) {
if (context.tools && context.tools.length > 0) {
params.tools = convertResponsesTools(context.tools);
}

View file

@@ -312,7 +312,7 @@ function buildRequestBody(
body.temperature = options.temperature;
}
if (context.tools) {
if (context.tools && context.tools.length > 0) {
body.tools = convertResponsesTools(context.tools, { strict: null });
}

View file

@@ -344,7 +344,7 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
params.temperature = options.temperature;
}
if (context.tools) {
if (context.tools && context.tools.length > 0) {
params.tools = convertTools(context.tools, compat);
maybeAddOpenRouterAnthropicToolCacheControl(model, params.tools);
} else if (hasToolHistory(context.messages)) {

View file

@@ -156,7 +156,7 @@ function buildParams(model: Model<"openai-responses">, context: Context, options
params.service_tier = options.serviceTier;
}
if (context.tools) {
if (context.tools && context.tools.length > 0) {
params.tools = convertResponsesTools(context.tools);
}