singularity-forge/src/cli-logs.ts
Mikael Hugo b24f426f2b batch: snapshot of in-flight v2 work
This commit captures uncommitted modifications that accumulated in the
working tree across multiple in-progress workstreams. It is a snapshot
to clear the deck before sf v3 work begins; individual workstreams
should land separately on top of this.

Notable additions:
- trace-collector.ts, traces.ts, src/tests/trace-export.test.ts —
  trace export plumbing
- biome.json — Biome linter configuration
- .gitignore — exclude native/npm/**/*.node compiled binaries

The bulk of the diff is across src/resources/extensions/sf/ (301 files)
and src/resources/extensions/sf/tests/ (277 files), reflecting the
ongoing sf extension work. Specific feature commits should follow this
snapshot rather than being excavated from it after the fact.

The 76MB native/npm/linux-x64-gnu/forge_engine.node compiled binary
was left out of the commit — it's now gitignored and built locally.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-29 12:42:31 +02:00

498 lines
13 KiB
TypeScript

import {
closeSync,
existsSync,
openSync,
readdirSync,
readFileSync,
readSync,
statSync,
unwatchFile,
watchFile,
} from "node:fs";
import { homedir } from "node:os";
import { basename, join } from "node:path";
/** Canonical names for the four log streams merged by this CLI. */
export type LogSourceName = "notif" | "session" | "activity" | "audit";
/** One normalized, display-ready log entry from any stream. */
export interface MergedLogEvent {
  /** Stream the entry came from. */
  source: LogSourceName;
  /** ISO-8601 timestamp (parse time is used when the entry has none). */
  timestamp: string;
  /** Single-line human-readable summary. */
  message: string;
  /** Optional severity label (e.g. "error"); used by severity filtering. */
  severity?: string;
  /** Path of the file the entry was read from. */
  filePath: string;
}
/** A concrete log file bound to the stream it feeds. */
interface SourceFile {
  source: LogSourceName;
  path: string;
}
/** Options for collecting recent events across all sources. */
export interface CollectRecentLogOptions {
  /** Project root; its .sf directory holds the notif/activity/audit files. */
  basePath: string;
  /** sf home directory; defaults to $SF_HOME or ~/.sf when omitted. */
  sfHome?: string;
  /** Restrict output to a single stream. */
  source?: LogSourceName;
  /** Case-insensitive severity filter. */
  severity?: string;
  /** Maximum number of events returned (clamped to >= 1; default 50). */
  limit?: number;
}
/** Collect options plus injectable output streams (useful in tests). */
interface TailOptions extends CollectRecentLogOptions {
  stdout?: Pick<typeof process.stdout, "write">;
  stderr?: Pick<typeof process.stderr, "write">;
}
/** Result of parsing `sf logs` command-line arguments. */
interface ParsedLogsArgs {
  command: "tail" | "follow";
  source?: LogSourceName;
  severity?: string;
}
/** Maps user-supplied source spellings (including plurals) to canonical names. */
const SOURCE_ALIASES: Record<string, LogSourceName> = {
  notif: "notif",
  notification: "notif",
  notifications: "notif",
  session: "session",
  sessions: "session",
  activity: "activity",
  audit: "audit",
};

/**
 * Resolves a user-provided source string to its canonical LogSourceName.
 * Returns undefined for missing, empty, or unrecognized values.
 */
function normalizeSource(value: string | undefined): LogSourceName | undefined {
  if (value === undefined || value === "") return undefined;
  const key = value.toLowerCase();
  return SOURCE_ALIASES[key];
}
/** Resolves the sf home directory: $SF_HOME if set (non-empty), else ~/.sf. */
function sfHomeFromEnv(): string {
  const fromEnv = process.env.SF_HOME;
  if (fromEnv) return fromEnv;
  return join(homedir(), ".sf");
}
/**
 * Derives the session-directory key for a project path: the leading path
 * separator is dropped, remaining separators and colons become dashes,
 * and the result is wrapped in double dashes.
 */
export function getProjectSessionKey(basePath: string): string {
  const withoutLeadingSep = basePath.replace(/^[/\\]/, "");
  const slug = withoutLeadingSep.replace(/[/\\:]/g, "-");
  return ["--", slug, "--"].join("");
}
/**
 * Finds the most recently modified *.jsonl file in dir.
 * Ties on mtime are broken by descending filename order.
 * Returns null when dir is missing, unreadable, or holds no .jsonl files.
 */
function latestJsonlFile(dir: string): string | null {
  if (!existsSync(dir)) return null;
  try {
    const entries: Array<{ path: string; mtimeMs: number }> = [];
    for (const name of readdirSync(dir)) {
      if (!name.endsWith(".jsonl")) continue;
      const fullPath = join(dir, name);
      try {
        entries.push({ path: fullPath, mtimeMs: statSync(fullPath).mtimeMs });
      } catch {
        // File vanished or is unreadable between readdir and stat: skip it.
      }
    }
    entries.sort((a, b) => {
      if (a.mtimeMs !== b.mtimeMs) return b.mtimeMs - a.mtimeMs;
      return basename(b.path).localeCompare(basename(a.path));
    });
    return entries.length > 0 ? entries[0].path : null;
  } catch {
    return null;
  }
}
/**
 * Builds the list of log files to read for a project, keeping only those
 * that exist on disk. Order fixes the per-source read order:
 * notif, session, activity, audit.
 */
function resolveSourceFiles(basePath: string, sfHome: string): SourceFile[] {
  const projectSfDir = join(basePath, ".sf");
  const sessionKey = getProjectSessionKey(basePath);
  // Prefer the agent/sessions layout; fall back to the plain sessions dir.
  const sessionFile =
    latestJsonlFile(join(sfHome, "agent", "sessions", sessionKey)) ??
    latestJsonlFile(join(sfHome, "sessions", sessionKey));
  const activityFile = latestJsonlFile(join(projectSfDir, "activity"));
  const candidates: Array<SourceFile | null> = [
    { source: "notif", path: join(projectSfDir, "notifications.jsonl") },
    sessionFile === null ? null : { source: "session", path: sessionFile },
    activityFile === null ? null : { source: "activity", path: activityFile },
    { source: "audit", path: join(projectSfDir, "audit-log.jsonl") },
  ];
  const resolved: SourceFile[] = [];
  for (const candidate of candidates) {
    if (candidate !== null && existsSync(candidate.path)) {
      resolved.push(candidate);
    }
  }
  return resolved;
}
/**
 * Reads the non-empty lines from the tail of a file.
 *
 * Files up to maxBytes are read whole; larger files are read from the last
 * maxBytes only, and the first (likely partial) line of that window is
 * dropped. Returns [] when the file is missing or unreadable.
 *
 * Fix: honor readSync's bytesRead. Previously the full buffer was decoded
 * even when fewer bytes were read (e.g. the file shrank between statSync
 * and readSync), which appended NUL bytes to the last line.
 */
function readRecentLines(filePath: string, maxBytes = 256 * 1024): string[] {
  let content: string;
  try {
    const stat = statSync(filePath);
    if (stat.size <= maxBytes) {
      content = readFileSync(filePath, "utf-8");
    } else {
      const fd = openSync(filePath, "r");
      try {
        const buffer = Buffer.alloc(maxBytes);
        const bytesRead = readSync(fd, buffer, 0, maxBytes, stat.size - maxBytes);
        content = buffer.subarray(0, bytesRead).toString("utf-8");
        // Drop the first line of the window: it is almost certainly a
        // fragment of a line that started before the window.
        const firstNewline = content.indexOf("\n");
        if (firstNewline >= 0) content = content.slice(firstNewline + 1);
      } finally {
        closeSync(fd);
      }
    }
  } catch {
    return [];
  }
  return content.split(/\n/).filter((line) => line.trim().length > 0);
}
/**
 * Coerces a log-entry timestamp (epoch millis or a date string) to ISO-8601.
 * Missing or unparseable values fall back to the current time.
 */
function parseTimestamp(value: unknown): string {
  if (typeof value === "number" && Number.isFinite(value)) {
    return new Date(value).toISOString();
  }
  if (typeof value === "string" && value.trim().length > 0) {
    const parsed = new Date(value);
    const millis = parsed.getTime();
    if (!Number.isNaN(millis)) return parsed.toISOString();
  }
  return new Date().toISOString();
}
/**
 * Collapses all whitespace runs to single spaces, trims, and caps the
 * result at max characters, appending "..." when truncated.
 */
function truncateOneLine(value: string, max = 220): string {
  const compact = value.replace(/\s+/g, " ").trim();
  return compact.length > max ? `${compact.slice(0, max - 3)}...` : compact;
}
/**
 * Extracts human-readable text from a message content field.
 * Accepts a plain string or an array of content blocks; only "text" and
 * "thinking" blocks contribute, joined by single spaces.
 *
 * Fix: the type-predicate filter previously returned `block && ...`, a
 * truthy non-boolean — a predicate must return boolean under strict TS.
 * The explicit object/null check is behaviorally identical.
 */
function textFromContent(content: unknown): string {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  return content
    .filter(
      (block): block is Record<string, unknown> =>
        typeof block === "object" && block !== null,
    )
    .filter((block) => block.type === "text" || block.type === "thinking")
    .map((block) => String(block.text ?? block.content ?? ""))
    .filter(Boolean)
    .join(" ");
}
/**
 * Collects tool names from "toolCall" blocks in a message content array.
 * Non-array content yields []. Blocks missing both `name` and `toolName`
 * are dropped.
 *
 * Fix: the type-predicate filter previously returned `block && ...`, a
 * truthy non-boolean — a predicate must return boolean under strict TS.
 * The explicit object/null check is behaviorally identical.
 */
function toolNamesFromContent(content: unknown): string[] {
  if (!Array.isArray(content)) return [];
  return content
    .filter(
      (block): block is Record<string, unknown> =>
        typeof block === "object" && block !== null,
    )
    .filter((block) => block.type === "toolCall")
    .map((block) => String(block.name ?? block.toolName ?? ""))
    .filter(Boolean);
}
/**
 * Summarizes a session- or activity-style JSONL entry into a MergedLogEvent.
 *
 * Two entry shapes are handled:
 *  - lifecycle events discriminated by `entry.type`
 *    ("tool_execution_start", "execution_complete", "message_update");
 *  - chat messages discriminated by `message.role`
 *    ("assistant", "toolResult"), where the payload may be nested under
 *    `entry.message` or be the entry itself.
 *
 * Returns null for entries with nothing worth displaying.
 */
function summarizeSessionLikeEntry(
  source: LogSourceName,
  entry: Record<string, unknown>,
  filePath: string,
): MergedLogEvent | null {
  const rawMessage = entry.message as Record<string, unknown> | undefined;
  // Some entries nest the payload under `message`; others are flat.
  const message =
    rawMessage && typeof rawMessage === "object" ? rawMessage : entry;
  const role = String(message.role ?? "");
  const entryType = String(entry.type ?? "");
  if (entryType === "tool_execution_start") {
    const toolName = String(entry.toolName ?? "tool");
    return {
      source,
      timestamp: parseTimestamp(entry.timestamp ?? entry.ts),
      message: `tool: ${toolName}`,
      filePath,
    };
  }
  if (entryType === "execution_complete") {
    return {
      source,
      timestamp: parseTimestamp(entry.timestamp ?? entry.ts),
      message: `execution complete: ${String(entry.status ?? "completed")}`,
      // Only a literal "error" status is surfaced as an error severity.
      severity: entry.status === "error" ? "error" : undefined,
      filePath,
    };
  }
  if (entryType === "message_update") {
    const update = entry.assistantMessageEvent as
      | Record<string, unknown>
      | undefined;
    const updateType = String(update?.type ?? "");
    if (updateType === "text_delta" || updateType === "thinking_delta") {
      const delta = String(update?.delta ?? update?.text ?? "");
      // Whitespace-only deltas carry no displayable content.
      if (!delta.trim()) return null;
      return {
        source,
        timestamp: parseTimestamp(entry.timestamp ?? entry.ts),
        message: `assistant: ${truncateOneLine(delta)}`,
        filePath,
      };
    }
    if (updateType === "toolcall_end") {
      const toolCall = update?.toolCall as Record<string, unknown> | undefined;
      const toolName = String(toolCall?.name ?? "tool");
      return {
        source,
        timestamp: parseTimestamp(entry.timestamp ?? entry.ts),
        message: `tool: ${toolName}`,
        filePath,
      };
    }
    // Other message_update subtypes are intentionally not displayed.
    return null;
  }
  if (role === "assistant") {
    const tools = toolNamesFromContent(message.content);
    const text = truncateOneLine(textFromContent(message.content));
    const stopReason =
      typeof message.stopReason === "string" ? message.stopReason : undefined;
    // Compose "assistant: ... | tools: ... | stop: ..." from whatever parts exist.
    const parts: string[] = [];
    if (text) parts.push(`assistant: ${text}`);
    if (tools.length > 0) parts.push(`tools: ${tools.join(", ")}`);
    if (stopReason) parts.push(`stop: ${stopReason}`);
    if (parts.length === 0) return null;
    return {
      source,
      timestamp: parseTimestamp(entry.timestamp ?? message.timestamp),
      message: parts.join(" | "),
      severity: stopReason === "error" ? "error" : undefined,
      filePath,
    };
  }
  if (role === "toolResult") {
    const toolName = String(message.toolName ?? "");
    // A tool result without a tool name is not useful to display.
    if (!toolName) return null;
    return {
      source,
      timestamp: parseTimestamp(entry.timestamp ?? message.timestamp),
      message: `tool result: ${toolName}${message.isError === true ? " error" : ""}`,
      severity: message.isError === true ? "error" : undefined,
      filePath,
    };
  }
  return null;
}
/**
 * Parses one JSONL line from a log file into a MergedLogEvent.
 * Returns null for unparseable JSON or entries with nothing to display.
 */
export function parseLogLine(
  source: LogSourceName,
  line: string,
  filePath: string,
): MergedLogEvent | null {
  let entry: Record<string, unknown>;
  try {
    entry = JSON.parse(line) as Record<string, unknown>;
  } catch {
    return null;
  }
  switch (source) {
    case "notif": {
      const text = String(entry.message ?? "");
      if (text.trim().length === 0) return null;
      return {
        source,
        timestamp: parseTimestamp(entry.ts ?? entry.timestamp),
        message: truncateOneLine(text),
        severity:
          typeof entry.severity === "string" ? entry.severity : undefined,
        filePath,
      };
    }
    case "audit": {
      const component = String(entry.component ?? "audit");
      const text = String(entry.message ?? entry.error ?? "");
      if (text.trim().length === 0) return null;
      return {
        source,
        timestamp: parseTimestamp(entry.ts ?? entry.timestamp),
        message: `[${component}] ${truncateOneLine(text)}`,
        // Non-string severity defaults to "error" for audit entries.
        severity:
          typeof entry.severity === "string" ? entry.severity : "error",
        filePath,
      };
    }
    default:
      // "session" and "activity" share the session-like entry shapes.
      return summarizeSessionLikeEntry(source, entry, filePath);
  }
}
/**
 * Applies optional source and severity filters to an event.
 * Severity comparison is case-insensitive; an event without a severity
 * only matches when no severity filter is given.
 */
function matchesFilters(
  event: MergedLogEvent,
  source?: LogSourceName,
  severity?: string,
): boolean {
  const sourceOk = !source || event.source === source;
  const severityOk =
    !severity ||
    (event.severity ?? "").toLowerCase() === severity.toLowerCase();
  return sourceOk && severityOk;
}
/**
 * Reads the tail of every resolved log file, parses and filters entries,
 * and returns the newest events in ascending timestamp order.
 * At most `limit` events are returned (default 50, clamped to >= 1).
 */
export function collectRecentLogEvents(
  options: CollectRecentLogOptions,
): MergedLogEvent[] {
  const sfHome = options.sfHome ?? sfHomeFromEnv();
  const limit = Math.max(1, options.limit ?? 50);
  const events: MergedLogEvent[] = [];
  for (const file of resolveSourceFiles(options.basePath, sfHome)) {
    if (options.source !== undefined && file.source !== options.source) {
      continue;
    }
    for (const line of readRecentLines(file.path)) {
      const event = parseLogLine(file.source, line, file.path);
      if (event === null) continue;
      if (matchesFilters(event, options.source, options.severity)) {
        events.push(event);
      }
    }
  }
  const byTime = (a: MergedLogEvent, b: MergedLogEvent) =>
    new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime();
  events.sort(byTime);
  // Keep only the newest `limit` events.
  return events.slice(-limit);
}
/** Renders an event as a single newline-terminated output line. */
export function formatMergedLogEvent(event: MergedLogEvent): string {
  const parts = [event.timestamp, `[${event.source}]`, event.message];
  return parts.join(" ") + "\n";
}
/**
 * Parses `sf logs <tail|follow> [--source X] [--severity Y]` arguments.
 * argv[0] is the "logs" command itself. Returns null for an unknown
 * subcommand. An unrecognized --source value leaves `source` undefined.
 * Both `--flag value` and `--flag=value` forms are accepted.
 */
function parseLogsArgs(argv: string[]): ParsedLogsArgs | null {
  const [sub, ...rest] = argv.slice(1);
  if (sub !== "tail" && sub !== "follow") return null;
  const parsed: ParsedLogsArgs = { command: sub };
  for (let index = 0; index < rest.length; index++) {
    const token = rest[index];
    if (token === "--source" && index + 1 < rest.length) {
      index++;
      parsed.source = normalizeSource(rest[index]);
    } else if (token.startsWith("--source=")) {
      parsed.source = normalizeSource(token.slice("--source=".length));
    } else if (token === "--severity" && index + 1 < rest.length) {
      index++;
      parsed.severity = rest[index];
    } else if (token.startsWith("--severity=")) {
      parsed.severity = token.slice("--severity=".length);
    }
  }
  return parsed;
}
/** Builds the newline-terminated CLI usage text. */
function usage(): string {
  const lines = [
    "Usage: sf logs tail|follow [--source notif|session|activity|audit] [--severity level]",
    "",
    "Streams notifications, latest session summaries, latest activity log, and audit errors.",
    "",
  ];
  return lines.join("\n");
}
/**
 * Reads new complete lines appended to a file since `offset` (bytes).
 * Returns the non-empty lines plus the offset to resume from. If the file
 * shrank below `offset` (rotation/truncation), reading restarts at 0.
 * Errors yield no lines and leave the offset unchanged.
 *
 * Fixes:
 *  - Only consume up to the last newline: previously a line still being
 *    written was emitted as a partial fragment and the remainder surfaced
 *    as a separate bogus line on the next poll — both halves then failed
 *    JSON.parse upstream and were silently dropped. A trailing partial
 *    line now stays unconsumed until its newline arrives.
 *  - Honor readSync's bytesRead instead of decoding the whole buffer.
 */
function readFromOffset(
  filePath: string,
  offset: number,
): { lines: string[]; offset: number } {
  try {
    const stat = statSync(filePath);
    const start = stat.size < offset ? 0 : offset;
    const length = stat.size - start;
    if (length <= 0) return { lines: [], offset: stat.size };
    const fd = openSync(filePath, "r");
    try {
      const buffer = Buffer.alloc(length);
      const bytesRead = readSync(fd, buffer, 0, length, start);
      const chunk = buffer.subarray(0, bytesRead);
      const lastNewline = chunk.lastIndexOf(0x0a); // '\n'
      // No complete line yet: consume nothing, retry on the next poll.
      if (lastNewline < 0) return { lines: [], offset: start };
      const complete = chunk.subarray(0, lastNewline + 1);
      return {
        lines: complete
          .toString("utf-8")
          .split(/\n/)
          .filter((line) => line.trim().length > 0),
        offset: start + lastNewline + 1,
      };
    } finally {
      closeSync(fd);
    }
  } catch {
    return { lines: [], offset };
  }
}
/**
 * Entry point for `sf logs tail|follow`.
 *
 * Both subcommands first print the 50 most recent merged events. `tail`
 * then exits; `follow` keeps polling the underlying files (watchFile at
 * 500ms, plus a 1s re-scan for files created after startup) and streams
 * new lines until SIGINT/SIGTERM.
 *
 * Returns a process exit code: 0 on success, 1 on usage errors.
 *
 * Fix: `parsed.command` was parsed but never read — `tail` fell through
 * into follow mode and never exited. `tail` now returns after printing
 * the recent events.
 */
export async function runLogsCli(
  argv: string[],
  options: TailOptions,
): Promise<number> {
  const parsed = parseLogsArgs(argv);
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (!parsed) {
    stderr.write(usage());
    return 1;
  }
  // A --source flag was supplied but did not normalize to a known source.
  if (
    parsed.source === undefined &&
    argv.some((arg) => arg.startsWith("--source"))
  ) {
    stderr.write("sf logs: unknown --source value\n");
    return 1;
  }
  const source = parsed.source ?? options.source;
  const severity = parsed.severity ?? options.severity;
  const sfHome = options.sfHome ?? sfHomeFromEnv();
  for (const event of collectRecentLogEvents({
    basePath: options.basePath,
    sfHome,
    source,
    severity,
    limit: 50,
  })) {
    stdout.write(formatMergedLogEvent(event));
  }
  // `tail` only prints history; watching is exclusive to `follow`.
  if (parsed.command === "tail") return 0;
  const offsets = new Map<string, number>();
  const watched = new Map<string, SourceFile>();
  // Watch every matching file; re-scan so files created after startup
  // (e.g. a new session log) are picked up too.
  const refreshWatches = () => {
    for (const file of resolveSourceFiles(options.basePath, sfHome)) {
      if (source && file.source !== source) continue;
      if (watched.has(file.path)) continue;
      watched.set(file.path, file);
      try {
        // Start at the current end so already-printed history is not repeated.
        offsets.set(file.path, statSync(file.path).size);
      } catch {
        offsets.set(file.path, 0);
      }
      watchFile(file.path, { interval: 500 }, () => {
        const currentOffset = offsets.get(file.path) ?? 0;
        const result = readFromOffset(file.path, currentOffset);
        offsets.set(file.path, result.offset);
        for (const line of result.lines) {
          const event = parseLogLine(file.source, line, file.path);
          if (event && matchesFilters(event, source, severity)) {
            stdout.write(formatMergedLogEvent(event));
          }
        }
      });
    }
  };
  refreshWatches();
  const refreshTimer = setInterval(refreshWatches, 1000);
  // Block until a termination signal, then tear down watchers and timer.
  await new Promise<void>((resolve) => {
    const stop = () => {
      clearInterval(refreshTimer);
      for (const path of watched.keys()) unwatchFile(path);
      process.off("SIGINT", stop);
      process.off("SIGTERM", stop);
      resolve();
    };
    process.on("SIGINT", stop);
    process.on("SIGTERM", stop);
  });
  return 0;
}