fix: resolve OAuth API key in buildMemoryLLMCall via modelRegistry (#2959) (#3233)

buildMemoryLLMCall called completeSimple without passing an API key,
which routed to streamSimpleAnthropic -> getEnvApiKey (env vars only).
OAuth users (Claude Max/Pro) store tokens in auth.json, so getEnvApiKey
returned undefined, the call threw, and memory extraction silently failed.

Now resolves the key eagerly via ctx.modelRegistry.getApiKey() which
checks auth.json through authStorage, matching how streamAnthropic and
the compaction orchestrator resolve credentials.

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Tom Boucher 2026-03-30 16:32:22 -04:00 committed by GitHub
parent dae746b905
commit d41ce60530
2 changed files with 93 additions and 2 deletions

View file

@ -87,14 +87,22 @@ export function buildMemoryLLMCall(ctx: ExtensionContext): LLMCallFn | null {
const selectedModel = model as Model<Api>;
// Resolve API key via modelRegistry so OAuth tokens (auth.json) are used.
// Without this, streamSimpleAnthropic only checks env vars via getEnvApiKey,
// which returns undefined for OAuth users (Claude Max / Claude Pro).
// See: https://github.com/gsd-build/gsd-2/issues/2959
const resolvedKeyPromise = ctx.modelRegistry.getApiKey(selectedModel).catch(() => undefined);
return async (system: string, user: string): Promise<string> => {
const { completeSimple } = await import('@gsd/pi-ai');
const resolvedApiKey = await resolvedKeyPromise;
const result: AssistantMessage = await completeSimple(selectedModel, {
systemPrompt: system,
messages: [{ role: 'user', content: [{ type: 'text', text: user }], timestamp: Date.now() }],
}, {
maxTokens: 2048,
temperature: 0,
...(resolvedApiKey ? { apiKey: resolvedApiKey } : {}),
});
// Extract text from response

View file

@ -1,4 +1,4 @@
import { parseMemoryResponse, _resetExtractionState } from '../memory-extractor.ts';
import { parseMemoryResponse, _resetExtractionState, buildMemoryLLMCall } from '../memory-extractor.ts';
import {
openDatabase,
closeDatabase,
@ -9,7 +9,7 @@ import {
getActiveMemoriesRanked,
} from '../memory-store.ts';
import type { MemoryAction } from '../memory-store.ts';
import { describe, test, beforeEach, afterEach } from 'node:test';
import { describe, test, beforeEach, afterEach, mock } from 'node:test';
import assert from 'node:assert/strict';
// ═══════════════════════════════════════════════════════════════════════════
@ -169,3 +169,86 @@ test('memory-extractor: reset extraction state', () => {
assert.ok(true, '_resetExtractionState should not throw');
});
// ═══════════════════════════════════════════════════════════════════════════
// memory-extractor: buildMemoryLLMCall resolves OAuth API key via modelRegistry
// Regression test for #2959 — OAuth users had broken memory extraction
// because streamSimpleAnthropic only checked env vars, not auth.json.
// ═══════════════════════════════════════════════════════════════════════════
test('memory-extractor: buildMemoryLLMCall resolves API key from modelRegistry for OAuth users', async () => {
  const OAUTH_TOKEN = 'sk-ant-oat-test-oauth-token-12345';
  let getApiKeyCalled = false;
  // Minimal Model-shaped stub — only the fields the builder inspects.
  const fakeModel = {
    id: 'claude-haiku-test',
    provider: 'anthropic',
    api: 'anthropic-messages',
    cost: { input: 0.25, output: 1.25 },
  };
  const ctx = {
    modelRegistry: {
      getAvailable: () => [fakeModel],
      getApiKey: async (_model: any) => {
        getApiKeyCalled = true;
        return OAUTH_TOKEN;
      },
    },
  } as any;
  const llmCallFn = buildMemoryLLMCall(ctx);
  assert.ok(llmCallFn !== null, 'buildMemoryLLMCall should return a function when models are available');
  // buildMemoryLLMCall calls modelRegistry.getApiKey() eagerly while building
  // the closure; yield a single macrotask so the async stub settles. The
  // previous arbitrary 50ms setTimeout only slowed the suite and invited
  // flakiness under load — setImmediate is deterministic and immediate.
  await new Promise(resolve => setImmediate(resolve));
  assert.ok(getApiKeyCalled, 'buildMemoryLLMCall must call modelRegistry.getApiKey() to resolve OAuth tokens');
});
test('memory-extractor: buildMemoryLLMCall returns null when no models available', () => {
  // A registry whose model list is empty leaves nothing to build a call for.
  const emptyRegistryCtx = {
    modelRegistry: {
      getAvailable: () => [],
      getApiKey: async () => undefined,
    },
  } as any;

  const result = buildMemoryLLMCall(emptyRegistryCtx);
  assert.strictEqual(result, null, 'should return null when no models available');
});
test('memory-extractor: buildMemoryLLMCall prefers haiku model', async () => {
  let resolvedModelId: string | undefined;
  const haikuModel = {
    id: 'claude-3-5-haiku-20241022',
    provider: 'anthropic',
    api: 'anthropic-messages',
    cost: { input: 0.25, output: 1.25 },
  };
  const sonnetModel = {
    id: 'claude-sonnet-4-20250514',
    provider: 'anthropic',
    api: 'anthropic-messages',
    cost: { input: 3, output: 15 },
  };
  const ctx = {
    modelRegistry: {
      // Sonnet listed first to prove selection is by preference, not order.
      getAvailable: () => [sonnetModel, haikuModel],
      getApiKey: async (model: any) => {
        resolvedModelId = model.id;
        return 'sk-ant-oat-test-token';
      },
    },
  } as any;
  const llmCallFn = buildMemoryLLMCall(ctx);
  assert.ok(llmCallFn !== null, 'should return a function');
  // Yield one macrotask for the eagerly-started getApiKey() promise to settle;
  // the previous fixed 50ms setTimeout only slowed the suite.
  await new Promise(resolve => setImmediate(resolve));
  assert.strictEqual(resolvedModelId, 'claude-3-5-haiku-20241022',
    'should resolve API key for haiku model, not sonnet');
});