chore(pi-ai): typecheck cleanup, empty-catch comments, OAuth audit notes
- package.json: add 'typecheck' script (build:pi + tsc --noEmit) so pi-ai and pi-coding-agent typecheck under the same command surface SF uses.
- anthropic-shared.ts: replace 'as any' casts with proper Anthropic SDK types (ServerToolUseBlockParam, WebSearchToolResultBlockParam, CacheControlEphemeral). The cache_control variant is documented inline so the cast is auditable.
- openai-completions.ts: drop the 'as any' on stream_options — the type system can verify the assignment now.
- openai-codex-responses.ts, package-manager.ts, skills.ts: annotate the three remaining empty catches with one-line WHY comments (best-effort cleanup, malformed ignore files, partial directory traversal). Empty catch with no rationale is an SF012 anti-pattern; with rationale it is a deliberate fallback.
- oauth/github-copilot.ts, oauth/openai-codex.ts: add UPSTREAM AUDIT blocks documenting why these hand-rolled OAuth flows stay hand-rolled rather than delegating to @octokit/auth-oauth-device or @openai/codex. AbortSignal coverage and provider-specific surface area are the gating concerns; re-audit triggers are named.
This commit is contained in:
parent
e6a2ec0a8f
commit
2846c296ee
8 changed files with 85 additions and 16 deletions
|
|
@@ -88,6 +88,7 @@
|
||||||
"sync-pkg-version": "node scripts/sync-pkg-version.cjs",
|
"sync-pkg-version": "node scripts/sync-pkg-version.cjs",
|
||||||
"sync-platform-versions": "node rust-engine/scripts/sync-platform-versions.cjs",
|
"sync-platform-versions": "node rust-engine/scripts/sync-platform-versions.cjs",
|
||||||
"validate-pack": "node scripts/validate-pack.js",
|
"validate-pack": "node scripts/validate-pack.js",
|
||||||
|
"typecheck": "npm run build:pi && tsc --noEmit",
|
||||||
"typecheck:extensions": "npm run check:versioned-json && tsc --noEmit --project tsconfig.extensions.json",
|
"typecheck:extensions": "npm run check:versioned-json && tsc --noEmit --project tsconfig.extensions.json",
|
||||||
"check:versioned-json": "node scripts/check-versioned-json.mjs",
|
"check:versioned-json": "node scripts/check-versioned-json.mjs",
|
||||||
"lint": "npm run check:versioned-json && biome check src/",
|
"lint": "npm run check:versioned-json && biome check src/",
|
||||||
|
|
|
||||||
|
|
@@ -3,10 +3,14 @@
|
||||||
*/
|
*/
|
||||||
import type Anthropic from "@anthropic-ai/sdk";
|
import type Anthropic from "@anthropic-ai/sdk";
|
||||||
import type {
|
import type {
|
||||||
|
CacheControlEphemeral,
|
||||||
ContentBlockParam,
|
ContentBlockParam,
|
||||||
MessageCreateParamsStreaming,
|
MessageCreateParamsStreaming,
|
||||||
MessageParam,
|
MessageParam,
|
||||||
|
RawContentBlockStartEvent,
|
||||||
RawMessageStreamEvent,
|
RawMessageStreamEvent,
|
||||||
|
ServerToolUseBlockParam,
|
||||||
|
WebSearchToolResultBlockParam,
|
||||||
} from "@anthropic-ai/sdk/resources/messages.js";
|
} from "@anthropic-ai/sdk/resources/messages.js";
|
||||||
import { calculateCost } from "../models.js";
|
import { calculateCost } from "../models.js";
|
||||||
import type {
|
import type {
|
||||||
|
|
@@ -329,15 +333,15 @@ export function convertMessages(
|
||||||
blocks.push({
|
blocks.push({
|
||||||
type: "server_tool_use",
|
type: "server_tool_use",
|
||||||
id: block.id,
|
id: block.id,
|
||||||
name: block.name,
|
name: block.name as ServerToolUseBlockParam["name"],
|
||||||
input: block.input ?? {},
|
input: block.input ?? {},
|
||||||
} as any);
|
} as ServerToolUseBlockParam);
|
||||||
} else if (block.type === "webSearchResult") {
|
} else if (block.type === "webSearchResult") {
|
||||||
blocks.push({
|
blocks.push({
|
||||||
type: "web_search_tool_result",
|
type: "web_search_tool_result",
|
||||||
tool_use_id: block.toolUseId,
|
tool_use_id: block.toolUseId,
|
||||||
content: block.content,
|
content: block.content,
|
||||||
} as any);
|
} as WebSearchToolResultBlockParam);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (blocks.length === 0) continue;
|
if (blocks.length === 0) continue;
|
||||||
|
|
@@ -385,7 +389,10 @@ export function convertMessages(
|
||||||
lastBlock &&
|
lastBlock &&
|
||||||
(lastBlock.type === "text" || lastBlock.type === "image" || lastBlock.type === "tool_result")
|
(lastBlock.type === "text" || lastBlock.type === "image" || lastBlock.type === "tool_result")
|
||||||
) {
|
) {
|
||||||
(lastBlock as any).cache_control = cacheControl;
|
// TextBlockParam, ImageBlockParam, and ToolResultBlockParam all
|
||||||
|
// carry cache_control?: CacheControlEphemeral | null — the type
|
||||||
|
// guard above narrows to exactly those three variants.
|
||||||
|
(lastBlock as { cache_control?: CacheControlEphemeral | null }).cache_control = cacheControl;
|
||||||
}
|
}
|
||||||
} else if (typeof lastMessage.content === "string") {
|
} else if (typeof lastMessage.content === "string") {
|
||||||
lastMessage.content = [
|
lastMessage.content = [
|
||||||
|
|
@@ -394,7 +401,7 @@ export function convertMessages(
|
||||||
text: lastMessage.content,
|
text: lastMessage.content,
|
||||||
cache_control: cacheControl,
|
cache_control: cacheControl,
|
||||||
},
|
},
|
||||||
] as any;
|
];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -409,8 +416,10 @@ export function convertTools(
|
||||||
): Anthropic.Messages.Tool[] {
|
): Anthropic.Messages.Tool[] {
|
||||||
if (!tools) return [];
|
if (!tools) return [];
|
||||||
|
|
||||||
const result = tools.map((tool) => {
|
const result: Anthropic.Messages.Tool[] = tools.map((tool) => {
|
||||||
const jsonSchema = tool.parameters as any;
|
// TSchema extends SchemaOptions which carries [prop: string]: any,
|
||||||
|
// so .properties and .required are accessible without a cast.
|
||||||
|
const jsonSchema = tool.parameters;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
name: isOAuthToken ? toClaudeCodeName(tool.name) : tool.name,
|
name: isOAuthToken ? toClaudeCodeName(tool.name) : tool.name,
|
||||||
|
|
@@ -418,14 +427,15 @@ export function convertTools(
|
||||||
input_schema: {
|
input_schema: {
|
||||||
type: "object" as const,
|
type: "object" as const,
|
||||||
properties: jsonSchema.properties || {},
|
properties: jsonSchema.properties || {},
|
||||||
required: jsonSchema.required || [],
|
required: (jsonSchema.required as string[] | undefined) || [],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add cache breakpoint to last tool — covers entire tool block
|
// Add cache breakpoint to last tool — covers entire tool block.
|
||||||
|
// Anthropic.Messages.Tool carries cache_control?: CacheControlEphemeral | null.
|
||||||
if (cacheControl && result.length > 0) {
|
if (cacheControl && result.length > 0) {
|
||||||
(result[result.length - 1] as any).cache_control = cacheControl;
|
result[result.length - 1].cache_control = cacheControl;
|
||||||
}
|
}
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
|
|
|
||||||
|
|
@@ -512,7 +512,9 @@ function isWebSocketReusable(socket: WebSocketLike): boolean {
|
||||||
function closeWebSocketSilently(socket: WebSocketLike, code = 1000, reason = "done"): void {
|
function closeWebSocketSilently(socket: WebSocketLike, code = 1000, reason = "done"): void {
|
||||||
try {
|
try {
|
||||||
socket.close(code, reason);
|
socket.close(code, reason);
|
||||||
} catch {}
|
} catch {
|
||||||
|
// best-effort: socket may already be closed or in an invalid state; ignore
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function scheduleSessionWebSocketExpiry(sessionId: string, entry: CachedWebSocketConnection): void {
|
function scheduleSessionWebSocketExpiry(sessionId: string, entry: CachedWebSocketConnection): void {
|
||||||
|
|
@@ -875,7 +877,9 @@ async function parseErrorResponse(response: Response): Promise<{ message: string
|
||||||
}
|
}
|
||||||
message = err.message || friendlyMessage || message;
|
message = err.message || friendlyMessage || message;
|
||||||
}
|
}
|
||||||
} catch {}
|
} catch {
|
||||||
|
// best-effort: response may not be JSON; fallback message already set above
|
||||||
|
}
|
||||||
|
|
||||||
return { message, friendlyMessage };
|
return { message, friendlyMessage };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -8,13 +8,16 @@ import type {
|
||||||
ChatCompletionContentPartImage,
|
ChatCompletionContentPartImage,
|
||||||
ChatCompletionContentPartText,
|
ChatCompletionContentPartText,
|
||||||
ChatCompletionMessageParam,
|
ChatCompletionMessageParam,
|
||||||
|
ChatCompletionReasoningEffort,
|
||||||
ChatCompletionToolMessageParam,
|
ChatCompletionToolMessageParam,
|
||||||
} from "openai/resources/chat/completions.js";
|
} from "openai/resources/chat/completions.js";
|
||||||
|
import type { FunctionParameters } from "openai/resources/shared.js";
|
||||||
import { getEnvApiKey } from "../env-api-keys.js";
|
import { getEnvApiKey } from "../env-api-keys.js";
|
||||||
import { calculateCost, supportsXhigh } from "../models.js";
|
import { calculateCost, supportsXhigh } from "../models.js";
|
||||||
import type {
|
import type {
|
||||||
AssistantMessage,
|
AssistantMessage,
|
||||||
Context,
|
Context,
|
||||||
|
ImageContent,
|
||||||
Message,
|
Message,
|
||||||
Model,
|
Model,
|
||||||
OpenAICompletionsCompat,
|
OpenAICompletionsCompat,
|
||||||
|
|
@@ -326,7 +329,7 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
|
||||||
};
|
};
|
||||||
|
|
||||||
if (compat.supportsUsageInStreaming !== false) {
|
if (compat.supportsUsageInStreaming !== false) {
|
||||||
(params as any).stream_options = { include_usage: true };
|
params.stream_options = { include_usage: true };
|
||||||
}
|
}
|
||||||
|
|
||||||
if (compat.supportsStore) {
|
if (compat.supportsStore) {
|
||||||
|
|
|
||||||
|
|
@@ -1,5 +1,25 @@
|
||||||
/**
|
/**
|
||||||
* GitHub Copilot OAuth flow
|
* GitHub Copilot OAuth flow
|
||||||
|
*
|
||||||
|
* UPSTREAM AUDIT (2026-05-02): STAY HAND-ROLLED
|
||||||
|
*
|
||||||
|
* Candidate: @octokit/auth-oauth-device (v8.0.3)
|
||||||
|
* Coverage: device-code initiation + authorization_pending/slow_down polling — the
|
||||||
|
* ~120 LOC in startDeviceFlow + pollForGitHubAccessToken only.
|
||||||
|
* Why we're not delegating:
|
||||||
|
* 1. AbortSignal cancellation — the library has no signal/abort support; our
|
||||||
|
* abortableSleep + signal checks are load-bearing for the login-cancel UX.
|
||||||
|
* 2. 74% of this file is Copilot-proprietary with no upstream equivalent:
|
||||||
|
* - copilot_internal/v2/token exchange (refreshGitHubCopilotToken)
|
||||||
|
* - proxy-ep token parsing / base-URL derivation (getBaseUrlFromToken)
|
||||||
|
* - Model policy enablement (enableAllGitHubCopilotModels)
|
||||||
|
* - Model limits fetch (fetchCopilotModelLimits)
|
||||||
|
* - Enterprise domain normalization (normalizeDomain / getUrls)
|
||||||
|
* 3. The library would add a dependency + lose abort support for a ~26% LOC
|
||||||
|
* reduction in a 460-line file — not worth it.
|
||||||
|
*
|
||||||
|
* Re-audit trigger: if @octokit/auth-oauth-device adds AbortSignal support AND
|
||||||
|
* the Copilot-specific surface area shrinks (e.g., models API becomes public SDK).
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { getModels } from "../../models.js";
|
import { getModels } from "../../models.js";
|
||||||
|
|
|
||||||
|
|
@@ -3,6 +3,31 @@
|
||||||
*
|
*
|
||||||
* NOTE: This module uses Node.js crypto and http for the OAuth callback.
|
* NOTE: This module uses Node.js crypto and http for the OAuth callback.
|
||||||
* It is only intended for CLI use, not browser environments.
|
* It is only intended for CLI use, not browser environments.
|
||||||
|
*
|
||||||
|
* UPSTREAM AUDIT (2026-05-02): STAY HAND-ROLLED
|
||||||
|
*
|
||||||
|
* Candidate: @openai/codex (v0.1.x)
|
||||||
|
* Coverage: none — it is a pure CLI bin with no programmatic exports (no
|
||||||
|
* main/exports fields, no auth library surface area).
|
||||||
|
*
|
||||||
|
* Why we're not delegating:
|
||||||
|
* 1. @openai/codex is a terminal UI app (ink + react), not a library.
|
||||||
|
* It exports no OAuth helpers, PKCE utilities, or token-exchange
|
||||||
|
* functions that third parties can import.
|
||||||
|
* 2. The openai npm SDK (v4/v6) has no ChatGPT OAuth / device-code
|
||||||
|
* helper — it assumes API key auth only.
|
||||||
|
* 3. The entire login surface is Codex-specific and undocumented upstream:
|
||||||
|
* - PKCE flow against auth.openai.com (non-standard originator params)
|
||||||
|
* - Local HTTP callback server on :1455 with races against manual paste
|
||||||
|
* - JWT claim extraction (https://api.openai.com/auth path) for accountId
|
||||||
|
* - Browser-vs-paste race (onManualCodeInput) for UX resilience
|
||||||
|
* - CHATGPT_UNSUPPORTED_MODEL_IDS filter (provider-specific knowledge)
|
||||||
|
* 4. No AbortSignal gap (unlike Copilot): the PKCE flow is one-shot with
|
||||||
|
* a timeout, not a long-polling loop.
|
||||||
|
*
|
||||||
|
* Re-audit trigger: if OpenAI publishes a @openai/auth or @openai/codex-core
|
||||||
|
* library with programmatic PKCE/token helpers, or if the openai SDK gains
|
||||||
|
* OAuth support.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// NEVER convert to top-level imports - breaks browser/Vite builds (web-ui)
|
// NEVER convert to top-level imports - breaks browser/Vite builds (web-ui)
|
||||||
|
|
|
||||||
|
|
@@ -161,7 +161,9 @@ function addIgnoreRules(ig: IgnoreMatcher, dir: string, rootDir: string): void {
|
||||||
if (patterns.length > 0) {
|
if (patterns.length > 0) {
|
||||||
ig.add(patterns);
|
ig.add(patterns);
|
||||||
}
|
}
|
||||||
} catch {}
|
} catch {
|
||||||
|
// best-effort: ignore files may be inaccessible or malformed; skip silently
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -75,7 +75,9 @@ function addIgnoreRules(ig: IgnoreMatcher, dir: string, rootDir: string): void {
|
||||||
if (patterns.length > 0) {
|
if (patterns.length > 0) {
|
||||||
ig.add(patterns);
|
ig.add(patterns);
|
||||||
}
|
}
|
||||||
} catch {}
|
} catch {
|
||||||
|
// best-effort: ignore files may be inaccessible or malformed; skip silently
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -246,7 +248,9 @@ function loadSkillsFromDirInternal(
|
||||||
}
|
}
|
||||||
diagnostics.push(...result.diagnostics);
|
diagnostics.push(...result.diagnostics);
|
||||||
}
|
}
|
||||||
} catch {}
|
} catch {
|
||||||
|
// best-effort: if directory traversal fails catastrophically, return partial results
|
||||||
|
}
|
||||||
|
|
||||||
return { skills, diagnostics };
|
return { skills, diagnostics };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue