singularity-forge/src/tests/blob-store.test.ts
Mikael Hugo 02a4339a51 refactor: rename pi-* packages to forge-native names (Phase 1)
Rename all four packages/pi-* directories to forge-native names,
stripping the 'pi' identity and establishing forge's own:

- packages/pi-coding-agent → packages/coding-agent
- packages/pi-ai → packages/ai
- packages/pi-agent-core → packages/agent-core
- packages/pi-tui → packages/tui

Package names updated:
- @singularity-forge/pi-coding-agent → @singularity-forge/coding-agent
- @singularity-forge/pi-ai → @singularity-forge/ai
- @singularity-forge/pi-agent-core → @singularity-forge/agent-core
- @singularity-forge/pi-tui → @singularity-forge/tui

All import references, bare string references, path references,
internal variable names (_bundledPi*), and dist files updated.
@mariozechner/pi-* third-party compat aliases preserved.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-05-10 11:28:01 +02:00

206 lines
8.3 KiB
TypeScript

/**
* Tests for BlobStore: content-addressed storage, path traversal protection,
* and blob ref parsing/externalization.
*/
import assert from "node:assert/strict";
import { createHash } from "node:crypto";
import { existsSync, mkdtempSync, readFileSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { afterEach, test } from "vitest";
import {
BlobStore,
externalizeImageData,
isBlobRef,
parseBlobRef,
resolveImageData,
} from "../../packages/coding-agent/src/core/blob-store.ts";
// ─── Helpers ─────────────────────────────────────────────────────────────────
/**
 * Create a unique temporary directory for one test and return it together
 * with a cleanup function that removes it recursively (force: true so a
 * missing dir is not an error).
 */
function makeTmpDir(): { dir: string; cleanup: () => void } {
  const dir = mkdtempSync(join(tmpdir(), "blob-test-"));
  const cleanup = (): void => {
    rmSync(dir, { recursive: true, force: true });
  };
  return { dir, cleanup };
}
/** Hex-encoded SHA-256 digest of a buffer — mirrors BlobStore's addressing scheme. */
function sha256(data: Buffer): string {
  const hasher = createHash("sha256");
  hasher.update(data);
  return hasher.digest("hex");
}
// ═══════════════════════════════════════════════════════════════════════════
// BlobStore.put / get / has
// ═══════════════════════════════════════════════════════════════════════════
// FIX: the original registered afterEach(cleanup) from inside the test body;
// vitest hooks must be registered at suite level, not mid-test, so cleanup was
// not reliably tied to this test. try/finally guarantees removal even on failure.
test("put stores data and returns correct hash", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const data = Buffer.from("hello world");
    const result = store.put(data);
    // Content-addressed: the returned hash must be the SHA-256 of the payload.
    assert.equal(result.hash, sha256(data));
    assert.ok(existsSync(result.path));
    assert.deepEqual(readFileSync(result.path), data);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("put is idempotent — same data returns same hash, no duplicate write", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const data = Buffer.from("duplicate test");
    const r1 = store.put(data);
    const r2 = store.put(data);
    // Same content must map to the same blob — no second file created.
    assert.equal(r1.hash, r2.hash);
    assert.equal(r1.path, r2.path);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("get retrieves stored data", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const data = Buffer.from("retrieve me");
    const { hash } = store.put(data);
    const retrieved = store.get(hash);
    assert.deepEqual(retrieved, data);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("get returns null for nonexistent hash", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    // Well-formed (64 hex chars) but absent from the store.
    const fakeHash = "a".repeat(64);
    assert.equal(store.get(fakeHash), null);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("has returns true for stored blob", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const { hash } = store.put(Buffer.from("exists"));
    assert.ok(store.has(hash));
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("has returns false for missing blob", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    assert.equal(store.has("b".repeat(64)), false);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("ref property returns correct blob: URI", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const data = Buffer.from("ref test");
    const result = store.put(data);
    assert.equal(result.ref, `blob:sha256:${result.hash}`);
  } finally {
    cleanup();
  }
});
// ═══════════════════════════════════════════════════════════════════════════
// Path traversal protection
// ═══════════════════════════════════════════════════════════════════════════
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("get rejects non-hex hash (path traversal attempt)", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    // Hash is used as a filename: anything non-hex must be refused, never joined.
    assert.equal(store.get("../../etc/passwd"), null);
    assert.equal(store.get("../../../foo"), null);
    assert.equal(store.get("not-a-valid-hash"), null);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("has rejects non-hex hash", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    assert.equal(store.has("../../etc/passwd"), false);
    assert.equal(store.has("short"), false);
    assert.equal(store.has("Z".repeat(64)), false); // uppercase not valid
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("get rejects hash with wrong length", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    assert.equal(store.get("a".repeat(63)), null); // too short
    assert.equal(store.get("a".repeat(65)), null); // too long
  } finally {
    cleanup();
  }
});
// ═══════════════════════════════════════════════════════════════════════════
// parseBlobRef / isBlobRef
// ═══════════════════════════════════════════════════════════════════════════
test("isBlobRef identifies valid refs", () => {
  const validRef = `blob:sha256:${"a".repeat(64)}`;
  assert.ok(isBlobRef(validRef));
  assert.equal(isBlobRef("not-a-ref"), false);
  // Prefix-only check by design; parseBlobRef enforces the full hash format.
  assert.ok(isBlobRef("blob:sha256:"));
});
test("parseBlobRef extracts valid hash", () => {
  const hexHash = "abcdef0123456789".repeat(4); // 64 lowercase hex chars
  const parsed = parseBlobRef(`blob:sha256:${hexHash}`);
  assert.equal(parsed, hexHash);
});
test("parseBlobRef rejects non-blob string", () => {
  const result = parseBlobRef("not-a-ref");
  assert.equal(result, null);
});
test("parseBlobRef rejects invalid hash format", () => {
  // Traversal, wrong length, and non-hex characters must all be refused.
  const badHashes = ["../../etc/passwd", "too-short", "G".repeat(64)];
  for (const bad of badHashes) {
    assert.equal(parseBlobRef(`blob:sha256:${bad}`), null);
  }
});
// ═══════════════════════════════════════════════════════════════════════════
// externalizeImageData / resolveImageData
// ═══════════════════════════════════════════════════════════════════════════
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("externalizeImageData stores base64 and returns blob ref", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const base64 = Buffer.from("image bytes").toString("base64");
    const ref = externalizeImageData(store, base64);
    assert.ok(ref.startsWith("blob:sha256:"));
    assert.ok(store.has(parseBlobRef(ref)!));
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("externalizeImageData passes through existing blob refs", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    // Already-externalized data must not be re-stored or rewritten.
    const existingRef = `blob:sha256:${"c".repeat(64)}`;
    assert.equal(externalizeImageData(store, existingRef), existingRef);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("resolveImageData round-trips with externalizeImageData", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    const base64 = Buffer.from("round trip test").toString("base64");
    const ref = externalizeImageData(store, base64);
    const resolved = resolveImageData(store, ref);
    assert.equal(resolved, base64);
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("resolveImageData returns non-ref strings unchanged", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    assert.equal(resolveImageData(store, "plain text"), "plain text");
  } finally {
    cleanup();
  }
});
// FIX: cleanup via try/finally — afterEach() may not be registered inside a test body.
test("resolveImageData returns ref unchanged when blob is missing", () => {
  const { dir, cleanup } = makeTmpDir();
  try {
    const store = new BlobStore(join(dir, "blobs"));
    // Graceful degradation: a dangling ref is passed through, not an error.
    const missingRef = `blob:sha256:${"d".repeat(64)}`;
    assert.equal(resolveImageData(store, missingRef), missingRef);
  } finally {
    cleanup();
  }
});