fix: deduplicate parseJSONL and unify MAX_JSONL_BYTES constant (#985)

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
TÂCHES 2026-03-17 17:05:35 -06:00 committed by GitHub
parent 4da793f576
commit 482bbf678b
3 changed files with 24 additions and 26 deletions

View file

@@ -15,6 +15,7 @@ import { fileURLToPath } from "node:url";
import { extractTrace, type ExecutionTrace } from "./session-forensics.js";
import { nativeParseJsonlTail } from "./native-parser-bridge.js";
import { MAX_JSONL_BYTES, parseJSONL } from "./jsonl-utils.js";
import {
loadLedgerFromDisk, getAverageCostPerUnitType, getProjectTotals,
formatCost, formatTokenCount, type UnitMetrics, type MetricsLedger,
@@ -65,17 +66,6 @@ interface ForensicReport {
recentUnits: { type: string; id: string; cost: number; duration: number; model: string; finishedAt: number }[];
}
// ─── JSONL Parser (inline — session-forensics.ts version is module-private) ──
const MAX_JSONL_BYTES = 5 * 1024 * 1024;
function parseJSONL(raw: string): unknown[] {
const source = raw.length > MAX_JSONL_BYTES ? raw.slice(-MAX_JSONL_BYTES) : raw;
return source.trim().split("\n").map(line => {
try { return JSON.parse(line); } catch { return null; }
}).filter(Boolean) as unknown[];
}
// ─── Entry Point ──────────────────────────────────────────────────────────────
export async function handleForensics(

View file

@@ -0,0 +1,21 @@
/**
 * Shared JSONL parsing utilities.
 *
 * Both forensics.ts and session-forensics.ts need to parse JSONL activity logs
 * with an upper byte limit to prevent V8 OOM on bloated files. This module
 * provides the single canonical implementation and constant.
 */

/** Max bytes to parse from a JSONL source. Prevents V8 OOM on bloated activity logs. */
export const MAX_JSONL_BYTES = 10 * 1024 * 1024; // 10 MB

/**
 * Parse a raw JSONL string into an array of parsed values.
 *
 * If the input exceeds MAX_JSONL_BYTES, only the tail is parsed (most recent
 * entries); a first line truncated mid-entry by the slice simply fails
 * JSON.parse and is skipped like any other malformed line.
 *
 * @param raw - Raw JSONL text, one JSON value per line.
 * @returns Parsed values in file order; blank and unparseable lines are dropped.
 */
export function parseJSONL(raw: string): unknown[] {
  // NOTE: String.length counts UTF-16 code units, not bytes — for ASCII-heavy
  // activity logs this matches the byte cap, but multi-byte text may slightly
  // exceed the nominal limit.
  const source = raw.length > MAX_JSONL_BYTES ? raw.slice(-MAX_JSONL_BYTES) : raw;
  const entries: unknown[] = [];
  for (const line of source.split("\n")) {
    if (line.trim() === "") continue; // blank line — not an entry
    try {
      // Push every successfully parsed value, including falsy ones (0, false,
      // "", null). The previous `.filter(Boolean)` used null both as a
      // parse-failure sentinel and as a value, silently discarding valid
      // falsy JSON entries.
      entries.push(JSON.parse(line));
    } catch {
      // Malformed line (e.g. the truncated head after slicing) — skip it.
    }
  }
  return entries;
}

View file

@@ -21,6 +21,7 @@
import { readFileSync, readdirSync, existsSync, statSync } from "node:fs";
import { basename, join } from "node:path";
import { nativeParseJsonlTail } from "./native-parser-bridge.js";
import { MAX_JSONL_BYTES, parseJSONL } from "./jsonl-utils.js";
import { nativeWorkingTreeStatus, nativeDiffStat } from "./native-git-bridge.js";
import { getAutoWorktreePath } from "./auto-worktree.js";
@@ -63,21 +64,7 @@ export interface RecoveryBriefing {
}
// ─── JSONL Parsing ────────────────────────────────────────────────────────────
/** Max bytes to parse from a JSONL source. Prevents V8 OOM on bloated activity logs. */
const MAX_JSONL_BYTES = 10 * 1024 * 1024; // 10 MB
function parseJSONL(raw: string): unknown[] {
// If the file is enormous, only parse the tail (most recent entries).
// This prevents the OOM crash path: large file → split → map → parse → OOM.
const source = raw.length > MAX_JSONL_BYTES
? raw.slice(-MAX_JSONL_BYTES)
: raw;
return source.trim().split("\n").map(line => {
try { return JSON.parse(line); }
catch { return null; }
}).filter(Boolean) as unknown[];
}
// MAX_JSONL_BYTES and parseJSONL are imported from ./jsonl-utils.js
/**
* Find the entries belonging to the last session in a JSONL file.