singularity-forge/src/pi-migration.ts
Mikael Hugo b24f426f2b batch: snapshot of in-flight v2 work
This commit captures uncommitted modifications that accumulated in the
working tree across multiple in-progress workstreams. It is a snapshot
to clear the deck before sf v3 work begins; individual workstreams
should land separately on top of this.

Notable additions:
- trace-collector.ts, traces.ts, src/tests/trace-export.test.ts —
  trace export plumbing
- biome.json — Biome linter configuration
- .gitignore — exclude native/npm/**/*.node compiled binaries

The bulk of the diff is across src/resources/extensions/sf/ (301 files)
and src/resources/extensions/sf/tests/ (277 files), reflecting the
ongoing sf extension work. Specific feature commits should follow this
snapshot rather than being excavated from it after the fact.

The 76MB native/npm/linux-x64-gnu/forge_engine.node compiled binary
was left out of the commit — it's now gitignored and built locally.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-29 12:42:31 +02:00

92 lines
2.4 KiB
TypeScript

/**
 * One-time migration of provider credentials from Pi (~/.pi/agent/auth.json)
 * into SF's auth storage. Runs when SF has no LLM providers configured,
 * so users with an existing Pi install skip re-authentication.
 */
import { existsSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import type {
AuthCredential,
AuthStorage,
} from "@singularity-forge/pi-coding-agent";
// Pi keeps its agent state under ~/.pi/agent; both files we read live there.
const PI_AGENT_DIR = join(homedir(), ".pi", "agent");
const PI_AUTH_PATH = join(PI_AGENT_DIR, "auth.json");
const PI_SETTINGS_PATH = join(PI_AGENT_DIR, "settings.json");

// Provider ids that count as "LLM providers" — having any of these configured
// means SF onboarding can be skipped after migration.
const LLM_PROVIDER_IDS = [
  "anthropic", "openai", "github-copilot", "openai-codex",
  "google-gemini-cli", "google", "groq", "xai",
  "openrouter", "mistral",
];
/**
 * Migrate provider credentials from Pi's auth.json into SF's AuthStorage.
 *
 * Only runs when SF has no LLM provider configured and Pi's auth.json exists.
 * Copies any credentials SF doesn't already have. Returns true if an LLM
 * provider was migrated (so onboarding can be skipped).
 *
 * Best-effort: any I/O or parse failure is swallowed and reported as "nothing
 * migrated" rather than surfacing to the caller.
 */
export function migratePiCredentials(authStorage: AuthStorage): boolean {
  try {
    // Skip entirely when the user already has at least one LLM provider.
    const existing = authStorage.list();
    const hasLlm = existing.some((id) => LLM_PROVIDER_IDS.includes(id));
    if (hasLlm) return false;
    if (!existsSync(PI_AUTH_PATH)) return false;

    const raw = readFileSync(PI_AUTH_PATH, "utf-8");
    const parsed: unknown = JSON.parse(raw);
    // Guard against a malformed auth.json (string, number, array, null):
    // Object.entries on e.g. a string would yield per-character pairs and
    // write junk credentials into AuthStorage.
    if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
      return false;
    }
    // NOTE(review): values are assumed to match AuthCredential; the shape is
    // not validated here — confirm Pi's auth.json schema if this ever drifts.
    const piData = parsed as Record<string, AuthCredential>;

    let migratedLlm = false;
    for (const [providerId, credential] of Object.entries(piData)) {
      // Never clobber a credential SF already has.
      if (authStorage.has(providerId)) continue;
      authStorage.set(providerId, credential);
      const isLlm = LLM_PROVIDER_IDS.includes(providerId);
      if (isLlm) migratedLlm = true;
      process.stderr.write(
        `[forge] Migrated ${isLlm ? "LLM provider" : "credential"}: ${providerId} (from Pi)\n`,
      );
    }
    return migratedLlm;
  } catch {
    // Unreadable file or invalid JSON — treat as "no Pi install to migrate".
    return false;
  }
}
/**
 * Read Pi's settings.json and return its default provider/model pair.
 *
 * Returns null when the settings file is missing, unreadable, not valid
 * JSON, or does not carry both `defaultProvider` and `defaultModel` as
 * strings.
 */
export function getPiDefaultModelAndProvider(): {
  provider: string;
  model: string;
} | null {
  try {
    if (!existsSync(PI_SETTINGS_PATH)) return null;
    const parsed = JSON.parse(readFileSync(PI_SETTINGS_PATH, "utf-8")) as {
      defaultProvider?: unknown;
      defaultModel?: unknown;
    };
    const { defaultProvider, defaultModel } = parsed;
    // Only trust the settings when both fields are present as strings.
    if (typeof defaultProvider === "string" && typeof defaultModel === "string") {
      return { provider: defaultProvider, model: defaultModel };
    }
    return null;
  } catch {
    // Unreadable or malformed settings — behave as if no defaults exist.
    return null;
  }
}