Merge pull request #415 from fluxlabs/perf/repo-hotpath-optimizations

This commit is contained in:
TÂCHES 2026-03-14 15:58:04 -06:00 committed by GitHub
commit 4334f3a27f
16 changed files with 620 additions and 288 deletions

View file

@ -6,10 +6,11 @@
use std::path::Path;
use ignore::WalkBuilder;
use napi::bindgen_prelude::*;
use napi_derive::napi;
use crate::{fs_cache, task};
// ═══════════════════════════════════════════════════════════════════════════
// Public types
// ═══════════════════════════════════════════════════════════════════════════
@ -52,45 +53,6 @@ pub struct FuzzyFindResult {
pub total_matches: u32,
}
// ═══════════════════════════════════════════════════════════════════════════
// Path utilities
// ═══════════════════════════════════════════════════════════════════════════
/// Resolve a search path string to a canonical `PathBuf` (must be a directory).
fn resolve_search_path(path: &str) -> Result<std::path::PathBuf> {
let candidate = std::path::PathBuf::from(path);
let root = if candidate.is_absolute() {
candidate
} else {
let cwd = std::env::current_dir()
.map_err(|err| Error::from_reason(format!("Failed to resolve cwd: {err}")))?;
cwd.join(candidate)
};
let metadata = std::fs::metadata(&root)
.map_err(|err| Error::from_reason(format!("Path not found: {err}")))?;
if !metadata.is_dir() {
return Err(Error::from_reason(
"Search path must be a directory".to_string(),
));
}
Ok(std::fs::canonicalize(&root).unwrap_or(root))
}
/// Return true when any single component of `path` equals `target` exactly.
fn contains_component(path: &Path, target: &str) -> bool {
    for part in path.components() {
        // Non-UTF-8 components yield `None` and can never match.
        if part.as_os_str().to_str() == Some(target) {
            return true;
        }
    }
    false
}
/// Skip `.git` directories and `node_modules`.
fn should_skip_path(path: &Path) -> bool {
contains_component(path, ".git") || contains_component(path, "node_modules")
}
// ═══════════════════════════════════════════════════════════════════════════
// Scoring
// ═══════════════════════════════════════════════════════════════════════════
@ -189,96 +151,6 @@ fn score_fuzzy_path(
score
}
// ═══════════════════════════════════════════════════════════════════════════
// Directory walking
// ═══════════════════════════════════════════════════════════════════════════
/// File type classification for discovered entries.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum EntryType {
    /// Regular file (anything that is neither a directory nor a symlink).
    File,
    /// Directory.
    Dir,
    /// Symbolic link (the walker does not follow links).
    Symlink,
}
/// A filesystem entry discovered during walking.
struct WalkEntry {
    /// Relative path from root (forward slashes, normalized on all platforms).
    path: String,
    /// Entry type.
    entry_type: EntryType,
}
/// Walk a directory tree collecting entries.
///
/// Honors `include_hidden` and, when `respect_gitignore` is set, the full
/// family of git ignore files (`.gitignore`, exclude, global, `.ignore`,
/// and parent directories). `.git` and `node_modules` directories are
/// always skipped. Symlinks are never followed. Entries that cannot be
/// read are silently dropped. Results are sorted by file path.
fn walk_directory(
    root: &Path,
    include_hidden: bool,
    respect_gitignore: bool,
) -> Vec<WalkEntry> {
    let mut builder = WalkBuilder::new(root);
    builder
        .hidden(!include_hidden)
        .follow_links(false)
        .sort_by_file_path(|a, b| a.cmp(b));
    if respect_gitignore {
        builder
            .git_ignore(true)
            .git_exclude(true)
            .git_global(true)
            .ignore(true)
            .parents(true);
    } else {
        builder
            .git_ignore(false)
            .git_exclude(false)
            .git_global(false)
            .ignore(false)
            .parents(false);
    }
    let mut entries = Vec::new();
    for entry in builder.build() {
        let Ok(entry) = entry else { continue };
        let path = entry.path();
        if should_skip_path(path) {
            continue;
        }
        // The root itself strips to an empty string and is excluded below.
        let relative = path.strip_prefix(root).unwrap_or(path);
        let relative_str = relative.to_string_lossy();
        if relative_str.is_empty() {
            continue;
        }
        // Normalize to forward slashes on all platforms.
        let relative_str = if cfg!(windows) && relative_str.contains('\\') {
            relative_str.replace('\\', "/")
        } else {
            relative_str.into_owned()
        };
        // Reuse the file type captured during the walk instead of issuing
        // an extra `symlink_metadata` syscall per entry — this is a hot
        // path and the walker already knows the (non-followed) type.
        // `file_type()` is `None` only for stdin, which cannot occur here.
        let Some(file_type) = entry.file_type() else {
            continue;
        };
        let entry_type = if file_type.is_symlink() {
            EntryType::Symlink
        } else if file_type.is_dir() {
            EntryType::Dir
        } else {
            EntryType::File
        };
        entries.push(WalkEntry {
            path: relative_str,
            entry_type,
        });
    }
    entries
}
// ═══════════════════════════════════════════════════════════════════════════
// Execution
// ═══════════════════════════════════════════════════════════════════════════
@ -294,7 +166,7 @@ fn clamp_u32(value: u64) -> u32 {
/// Results are sorted by match quality (higher score = better match).
#[napi(js_name = "fuzzyFind")]
pub fn fuzzy_find(options: FuzzyFindOptions) -> Result<FuzzyFindResult> {
let root = resolve_search_path(&options.path)?;
let root = fs_cache::resolve_search_path(&options.path)?;
let include_hidden = options.hidden.unwrap_or(false);
let respect_gitignore = options.gitignore.unwrap_or(true);
let max_results = options.max_results.unwrap_or(100) as usize;
@ -317,15 +189,39 @@ pub fn fuzzy_find(options: FuzzyFindOptions) -> Result<FuzzyFindResult> {
});
}
let entries = walk_directory(&root, include_hidden, respect_gitignore);
let ct = task::CancelToken::default();
let scan = fs_cache::get_or_scan(&root, include_hidden, respect_gitignore, &ct)?;
let mut scored = collect_matches(&scan.entries, &query_lower, &normalized_query, &query_chars);
if scored.is_empty() && scan.cache_age_ms >= fs_cache::empty_recheck_ms() {
let fresh = fs_cache::force_rescan(&root, include_hidden, respect_gitignore, true, &ct)?;
scored = collect_matches(&fresh, &query_lower, &normalized_query, &query_chars);
}
scored.sort_by(|a, b| b.score.cmp(&a.score).then_with(|| a.path.cmp(&b.path)));
let total_matches = clamp_u32(scored.len() as u64);
let matches = scored.into_iter().take(max_results).collect();
Ok(FuzzyFindResult {
matches,
total_matches,
})
}
fn collect_matches(
entries: &[fs_cache::GlobMatch],
query_lower: &str,
normalized_query: &str,
query_chars: &[char],
) -> Vec<FuzzyFindMatch> {
let mut scored: Vec<FuzzyFindMatch> = Vec::with_capacity(entries.len().min(256));
for entry in entries {
if entry.entry_type == EntryType::Symlink {
if entry.file_type == fs_cache::FileType::Symlink {
continue;
}
let is_directory = entry.entry_type == EntryType::Dir;
let is_directory = entry.file_type == fs_cache::FileType::Dir;
let score = score_fuzzy_path(
&entry.path,
is_directory,
@ -337,7 +233,7 @@ pub fn fuzzy_find(options: FuzzyFindOptions) -> Result<FuzzyFindResult> {
continue;
}
let mut path = entry.path;
let mut path = entry.path.clone();
if is_directory {
path.push('/');
}
@ -348,14 +244,7 @@ pub fn fuzzy_find(options: FuzzyFindOptions) -> Result<FuzzyFindResult> {
});
}
scored.sort_by(|a, b| b.score.cmp(&a.score).then_with(|| a.path.cmp(&b.path)));
let total_matches = clamp_u32(scored.len() as u64);
let matches = scored.into_iter().take(max_results).collect();
Ok(FuzzyFindResult {
matches,
total_matches,
})
scored
}
// ═══════════════════════════════════════════════════════════════════════════
@ -480,7 +369,9 @@ mod tests {
#[test]
fn test_walk_directory_real_fs() {
let root = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let entries = walk_directory(&root, false, true);
let entries =
fs_cache::force_rescan(&root, false, true, false, &task::CancelToken::default())
.expect("force_rescan should succeed");
let paths: Vec<&str> = entries.iter().map(|e| e.path.as_str()).collect();
assert!(
paths.iter().any(|p| p.contains("fd.rs")),

View file

@ -12,8 +12,9 @@
use std::{
borrow::Cow,
ops::Deref,
path::{Path, PathBuf},
sync::LazyLock,
sync::{Arc, LazyLock},
time::{Duration, Instant},
};
@ -100,10 +101,27 @@ struct CacheKey {
use_gitignore: bool,
}
#[derive(Clone)]
/// Cheaply cloneable, immutable collection of scanned entries shared across
/// cache consumers. Backed by `Arc<[GlobMatch]>`, so `clone()` is a
/// refcount bump — no copying of the entry data.
pub struct SharedGlobEntries(Arc<[GlobMatch]>);
impl SharedGlobEntries {
    /// Convert an owned vector into the shared, immutable representation.
    fn from_vec(entries: Vec<GlobMatch>) -> Self {
        Self(Arc::from(entries))
    }
}
impl Deref for SharedGlobEntries {
    type Target = [GlobMatch];
    // Deref to a plain slice so callers can index/iterate directly.
    fn deref(&self) -> &Self::Target {
        self.0.as_ref()
    }
}
#[derive(Clone)]
struct CacheEntry {
created_at: Instant,
entries: Vec<GlobMatch>,
entries: SharedGlobEntries,
}
static FS_CACHE: LazyLock<DashMap<CacheKey, CacheEntry>> = LazyLock::new(DashMap::new);
@ -111,7 +129,7 @@ static FS_CACHE: LazyLock<DashMap<CacheKey, CacheEntry>> = LazyLock::new(DashMap
/// Result of a cache-aware scan, including the age of the cached data.
pub struct ScanResult {
/// Scanned filesystem entries.
pub entries: Vec<GlobMatch>,
pub entries: SharedGlobEntries,
/// How old the cached data is in milliseconds (0 = freshly scanned).
pub cache_age_ms: u64,
}
@ -293,7 +311,8 @@ pub fn get_or_scan(
) -> Result<ScanResult> {
let ttl = cache_ttl_ms();
if ttl == 0 {
let entries = collect_entries(root, include_hidden, use_gitignore, ct)?;
let entries =
SharedGlobEntries::from_vec(collect_entries(root, include_hidden, use_gitignore, ct)?);
return Ok(ScanResult {
entries,
cache_age_ms: 0,
@ -319,7 +338,8 @@ pub fn get_or_scan(
FS_CACHE.remove(&key);
}
let entries = collect_entries(root, include_hidden, use_gitignore, ct)?;
let entries =
SharedGlobEntries::from_vec(collect_entries(root, include_hidden, use_gitignore, ct)?);
FS_CACHE.insert(
key,
CacheEntry {
@ -344,7 +364,7 @@ pub fn force_rescan(
use_gitignore: bool,
store: bool,
ct: &task::CancelToken,
) -> Result<Vec<GlobMatch>> {
) -> Result<SharedGlobEntries> {
let key = CacheKey {
root: root.to_path_buf(),
include_hidden,
@ -352,7 +372,8 @@ pub fn force_rescan(
};
FS_CACHE.remove(&key);
let entries = collect_entries(root, include_hidden, use_gitignore, ct)?;
let entries =
SharedGlobEntries::from_vec(collect_entries(root, include_hidden, use_gitignore, ct)?);
if store {
let now = Instant::now();
FS_CACHE.insert(

View file

@ -5,6 +5,8 @@
use napi::bindgen_prelude::*;
use napi_derive::napi;
use crate::task;
// ── N-API types (mirroring gsd_grep types for the JS boundary) ────────
#[napi(object)]
@ -114,8 +116,16 @@ fn convert_search_match(m: gsd_grep::SearchMatch) -> NapiSearchMatch {
NapiSearchMatch {
line_number: clamp_u32(m.line_number),
line: m.line,
context_before: m.context_before.into_iter().map(convert_context_line).collect(),
context_after: m.context_after.into_iter().map(convert_context_line).collect(),
context_before: m
.context_before
.into_iter()
.map(convert_context_line)
.collect(),
context_after: m
.context_after
.into_iter()
.map(convert_context_line)
.collect(),
truncated: m.truncated,
}
}
@ -125,8 +135,16 @@ fn convert_file_match(m: gsd_grep::FileMatch) -> NapiGrepMatch {
path: m.path,
line_number: clamp_u32(m.line_number),
line: m.line,
context_before: m.context_before.into_iter().map(convert_context_line).collect(),
context_after: m.context_after.into_iter().map(convert_context_line).collect(),
context_before: m
.context_before
.into_iter()
.map(convert_context_line)
.collect(),
context_after: m
.context_after
.into_iter()
.map(convert_context_line)
.collect(),
truncated: m.truncated,
}
}
@ -151,7 +169,11 @@ pub fn search(content: Buffer, options: NapiSearchOptions) -> Result<NapiSearchR
match gsd_grep::search_content(content.as_ref(), &opts) {
Ok(result) => Ok(NapiSearchResult {
matches: result.matches.into_iter().map(convert_search_match).collect(),
matches: result
.matches
.into_iter()
.map(convert_search_match)
.collect(),
match_count: clamp_u32(result.match_count),
limit_reached: result.limit_reached,
}),
@ -164,29 +186,31 @@ pub fn search(content: Buffer, options: NapiSearchOptions) -> Result<NapiSearchR
/// Walks the directory tree respecting `.gitignore` and optional glob filters.
/// Returns matches with file paths, line numbers, and optional context.
#[napi(js_name = "grep")]
pub fn grep(options: NapiGrepOptions) -> Result<NapiGrepResult> {
let opts = gsd_grep::GrepOptions {
pattern: options.pattern,
path: options.path,
glob: options.glob,
ignore_case: options.ignore_case.unwrap_or(false),
multiline: options.multiline.unwrap_or(false),
hidden: options.hidden.unwrap_or(false),
gitignore: options.gitignore.unwrap_or(true),
max_count: options.max_count.map(u64::from),
context_before: options.context_before.unwrap_or(0),
context_after: options.context_after.unwrap_or(0),
max_columns: options.max_columns.map(|v| v as usize),
};
pub fn grep(options: NapiGrepOptions) -> task::Async<NapiGrepResult> {
task::blocking("grep", (), move |_ct| {
let opts = gsd_grep::GrepOptions {
pattern: options.pattern,
path: options.path,
glob: options.glob,
ignore_case: options.ignore_case.unwrap_or(false),
multiline: options.multiline.unwrap_or(false),
hidden: options.hidden.unwrap_or(false),
gitignore: options.gitignore.unwrap_or(true),
max_count: options.max_count.map(u64::from),
context_before: options.context_before.unwrap_or(0),
context_after: options.context_after.unwrap_or(0),
max_columns: options.max_columns.map(|v| v as usize),
};
match gsd_grep::search_path(&opts) {
Ok(result) => Ok(NapiGrepResult {
matches: result.matches.into_iter().map(convert_file_match).collect(),
total_matches: clamp_u32(result.total_matches),
files_with_matches: result.files_with_matches,
files_searched: result.files_searched,
limit_reached: result.limit_reached,
}),
Err(err) => Err(Error::from_reason(err)),
}
match gsd_grep::search_path(&opts) {
Ok(result) => Ok(NapiGrepResult {
matches: result.matches.into_iter().map(convert_file_match).collect(),
total_matches: clamp_u32(result.total_matches),
files_with_matches: result.files_with_matches,
files_searched: result.files_searched,
limit_reached: result.limit_reached,
}),
Err(err) => Err(Error::from_reason(err)),
}
})
}

View file

@ -139,6 +139,41 @@ describe("native fd: fuzzyFind()", () => {
);
});
// Exercises the shared native scan cache: a warm scan must be served from
// cache even after the underlying file is deleted, until invalidated.
test("reuses the shared fs scan cache until invalidated", (t) => {
  // Preserve the ambient TTL so other tests are unaffected, and pin a long
  // TTL so the cache cannot expire mid-test.
  const previousTtl = process.env.FS_SCAN_CACHE_TTL_MS;
  process.env.FS_SCAN_CACHE_TTL_MS = "10000";
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
  t.after(() => {
    // Drop the cache entry and temp dir, then restore the TTL env var.
    native.invalidateFsScanCache(tmpDir);
    fs.rmSync(tmpDir, { recursive: true, force: true });
    if (previousTtl === undefined) {
      delete process.env.FS_SCAN_CACHE_TTL_MS;
    } else {
      process.env.FS_SCAN_CACHE_TTL_MS = previousTtl;
    }
  });
  fs.writeFileSync(path.join(tmpDir, "cached.txt"), "cached");
  native.invalidateFsScanCache(tmpDir);
  // Warm the cache with a scan that sees cached.txt on disk.
  const warm = native.fuzzyFind({ query: "cached", path: tmpDir });
  assert.ok(warm.matches.some((m) => m.path === "cached.txt"));
  // Delete the file; a cached scan should still report it.
  fs.unlinkSync(path.join(tmpDir, "cached.txt"));
  const cached = native.fuzzyFind({ query: "cached", path: tmpDir });
  assert.ok(
    cached.matches.some((m) => m.path === "cached.txt"),
    "should serve warm results from the shared fs scan cache",
  );
  // After explicit invalidation, a fresh scan must reflect the deletion.
  native.invalidateFsScanCache(tmpDir);
  const refreshed = native.fuzzyFind({ query: "cached", path: tmpDir });
  assert.equal(refreshed.matches.length, 0);
});
test("results are sorted by score descending", (t) => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));

View file

@ -93,7 +93,24 @@ describe("native grep: search()", () => {
describe("native grep: grep()", () => {
let tmpDir;
test("searches files on disk", (t) => {
test("returns a promise", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
const pending = native.grep({
pattern: "hello",
path: tmpDir,
});
assert.equal(typeof pending?.then, "function");
const result = await pending;
assert.equal(result.totalMatches, 1);
});
test("searches files on disk", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
@ -101,7 +118,7 @@ describe("native grep: grep()", () => {
fs.writeFileSync(path.join(tmpDir, "file2.txt"), "hello rust\nbaz qux\n");
fs.writeFileSync(path.join(tmpDir, "file3.log"), "no match here\n");
const result = native.grep({
const result = await native.grep({
pattern: "hello",
path: tmpDir,
});
@ -115,7 +132,7 @@ describe("native grep: grep()", () => {
assert.deepEqual(paths, [...paths].sort());
});
test("respects glob filter", (t) => {
test("respects glob filter", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
@ -123,7 +140,7 @@ describe("native grep: grep()", () => {
fs.writeFileSync(path.join(tmpDir, "code.js"), "hello javascript\n");
fs.writeFileSync(path.join(tmpDir, "readme.md"), "hello markdown\n");
const result = native.grep({
const result = await native.grep({
pattern: "hello",
path: tmpDir,
glob: "*.ts",
@ -133,7 +150,7 @@ describe("native grep: grep()", () => {
assert.equal(result.matches[0].line, "hello typescript");
});
test("respects maxCount", (t) => {
test("respects maxCount", async (t) => {
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
@ -141,7 +158,7 @@ describe("native grep: grep()", () => {
fs.writeFileSync(path.join(tmpDir, `file${i}.txt`), "match_me\n");
}
const result = native.grep({
const result = await native.grep({
pattern: "match_me",
path: tmpDir,
maxCount: 3,
@ -151,9 +168,9 @@ describe("native grep: grep()", () => {
assert.equal(result.limitReached, true);
});
test("errors on non-existent path", () => {
assert.throws(() => {
native.grep({
test("errors on non-existent path", async () => {
await assert.rejects(() => {
return native.grep({
pattern: "test",
path: "/nonexistent/path/that/does/not/exist",
});

View file

@ -19,6 +19,7 @@ export type { FuzzyFindMatch, FuzzyFindOptions, FuzzyFindResult };
*
* Searches for files and directories whose paths match the query string.
* Results are sorted by match quality (higher score = better match).
* Reuses the shared native filesystem scan cache used by glob discovery.
*
* Scoring tiers (highest to lowest):
* - 120: exact filename match

View file

@ -42,7 +42,8 @@ export function searchContent(
* Search files on disk for a regex pattern.
*
* Walks the directory tree respecting .gitignore and optional glob filters.
* Runs on the native blocking worker pool and resolves asynchronously.
*/
export function grep(options: GrepOptions): GrepResult {
return native.grep(options) as GrepResult;
export function grep(options: GrepOptions): Promise<GrepResult> {
return native.grep(options) as Promise<GrepResult>;
}

View file

@ -0,0 +1,65 @@
import assert from "node:assert/strict";
import { describe, it } from "node:test";
import { mkdtempSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { SessionManager } from "./session-manager.js";
/**
 * Build a minimal assistant message carrying only the usage fields under
 * test. `total` is derived from the four token counts; `cost` nests under
 * `cost.total` to mirror the real message shape.
 */
function makeAssistantMessage(input: number, output: number, cacheRead = 0, cacheWrite = 0, cost = 0) {
  const usage = {
    input,
    output,
    cacheRead,
    cacheWrite,
    total: input + output + cacheRead + cacheWrite,
    cost: { total: cost },
  };
  return {
    role: "assistant",
    content: [{ type: "text", text: "ok" }],
    usage,
  } as any;
}
describe("SessionManager usage totals", () => {
  // Usage must accumulate as entries are appended, not be recomputed by
  // rescanning every entry on each read.
  it("tracks assistant usage incrementally without rescanning entries", () => {
    const dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
    try {
      const manager = SessionManager.create(dir, dir);
      // A non-assistant message must not contribute to the totals.
      manager.appendMessage({ role: "user", content: [{ type: "text", text: "hello" }] } as any);
      manager.appendMessage(makeAssistantMessage(10, 5, 3, 2, 0.25));
      manager.appendMessage(makeAssistantMessage(7, 4, 1, 0, 0.1));
      // Totals are the field-wise sums of the two assistant messages.
      assert.deepEqual(manager.getUsageTotals(), {
        input: 17,
        output: 9,
        cacheRead: 4,
        cacheWrite: 2,
        cost: 0.35,
      });
    } finally {
      rmSync(dir, { recursive: true, force: true });
    }
  });
  it("resets totals when starting a new session", () => {
    const dir = mkdtempSync(join(tmpdir(), "gsd-session-manager-test-"));
    try {
      const manager = SessionManager.create(dir, dir);
      manager.appendMessage(makeAssistantMessage(5, 5, 0, 0, 0.05));
      assert.equal(manager.getUsageTotals().input, 5);
      // Starting a new session must zero the accumulated totals.
      manager.newSession();
      assert.deepEqual(manager.getUsageTotals(), {
        input: 0,
        output: 0,
        cacheRead: 0,
        cacheWrite: 0,
        cost: 0,
      });
    } finally {
      rmSync(dir, { recursive: true, force: true });
    }
  });
});

View file

@ -182,6 +182,14 @@ export interface SessionInfo {
allMessagesText: string;
}
/** Running token/cost totals accumulated from assistant message entries. */
export interface SessionUsageTotals {
  /** Sum of `usage.input` over assistant message entries. */
  input: number;
  /** Sum of `usage.output` over assistant message entries. */
  output: number;
  /** Sum of `usage.cacheRead` over assistant message entries. */
  cacheRead: number;
  /** Sum of `usage.cacheWrite` over assistant message entries. */
  cacheWrite: number;
  /** Sum of `usage.cost.total` over assistant message entries. */
  cost: number;
}
export type ReadonlySessionManager = Pick<
SessionManager,
| "getCwd"
@ -195,10 +203,21 @@ export type ReadonlySessionManager = Pick<
| "getBranch"
| "getHeader"
| "getEntries"
| "getUsageTotals"
| "getTree"
| "getSessionName"
>;
/** Zeroed usage totals for a fresh (or reset) session. */
function createEmptyUsageTotals(): SessionUsageTotals {
  return { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, cost: 0 };
}
/** Generate a unique short ID (8 hex chars, collision-checked) */
function generateId(byId: { has(id: string): boolean }): string {
for (let i = 0; i < 100; i++) {
@ -779,10 +798,12 @@ export class SessionManager {
private persist: boolean;
private flushed: boolean = false;
private fileEntries: FileEntry[] = [];
private sessionEntries: SessionEntry[] = [];
private byId: Map<string, SessionEntry> = new Map();
private blobStore: BlobStore;
private labelsById: Map<string, string> = new Map();
private leafId: string | null = null;
private usageTotals: SessionUsageTotals = createEmptyUsageTotals();
private constructor(cwd: string, sessionDir: string, sessionFile: string | undefined, persist: boolean) {
this.cwd = cwd;
@ -846,9 +867,11 @@ export class SessionManager {
parentSession: options?.parentSession,
};
this.fileEntries = [header];
this.sessionEntries = [];
this.byId.clear();
this.labelsById.clear();
this.leafId = null;
this.usageTotals = createEmptyUsageTotals();
this.flushed = false;
if (this.persist) {
@ -859,13 +882,17 @@ export class SessionManager {
}
private _buildIndex(): void {
this.sessionEntries = [];
this.byId.clear();
this.labelsById.clear();
this.leafId = null;
this.usageTotals = createEmptyUsageTotals();
for (const entry of this.fileEntries) {
if (entry.type === "session") continue;
this.sessionEntries.push(entry);
this.byId.set(entry.id, entry);
this.leafId = entry.id;
this._accumulateUsage(entry);
if (entry.type === "label") {
if (entry.label) {
this.labelsById.set(entry.targetId, entry.label);
@ -926,11 +953,30 @@ export class SessionManager {
private _appendEntry(entry: SessionEntry): void {
this.fileEntries.push(entry);
this.sessionEntries.push(entry);
this.byId.set(entry.id, entry);
this.leafId = entry.id;
this._accumulateUsage(entry);
this._persist(entry);
}
/**
 * Fold one entry's usage into the running totals.
 * Only assistant message entries that carry a `usage` payload contribute;
 * everything else is ignored.
 */
private _accumulateUsage(entry: SessionEntry): void {
  if (entry.type !== "message" || entry.message.role !== "assistant") {
    return;
  }
  const usage = entry.message.usage;
  if (!usage) {
    return;
  }
  const totals = this.usageTotals;
  totals.input += usage.input;
  totals.output += usage.output;
  totals.cacheRead += usage.cacheRead;
  totals.cacheWrite += usage.cacheWrite;
  totals.cost += usage.cost.total;
}
/** Append a message as child of current leaf, then advance leaf. Returns entry id.
* Does not allow writing CompactionSummaryMessage and BranchSummaryMessage directly.
* Reason: we want these to be top-level entries in the session, not message session entries,
@ -1167,7 +1213,11 @@ export class SessionManager {
* change the leaf pointer. Entries cannot be modified or deleted.
*/
getEntries(): SessionEntry[] {
return this.fileEntries.filter((e): e is SessionEntry => e.type !== "session");
return [...this.sessionEntries];
}
getUsageTotals(): SessionUsageTotals {
return { ...this.usageTotals };
}
/**

View file

@ -140,6 +140,7 @@ export function createFindTool(cwd: string, options?: FindToolOptions): AgentToo
path: searchPath,
hidden: true,
gitignore: true,
cache: true,
maxResults: effectiveLimit,
});

View file

@ -61,22 +61,12 @@ export class FooterComponent implements Component {
render(width: number): string[] {
const state = this.session.state;
// Calculate cumulative usage from ALL session entries (not just post-compaction messages)
let totalInput = 0;
let totalOutput = 0;
let totalCacheRead = 0;
let totalCacheWrite = 0;
let totalCost = 0;
for (const entry of this.session.sessionManager.getEntries()) {
if (entry.type === "message" && entry.message.role === "assistant") {
totalInput += entry.message.usage.input;
totalOutput += entry.message.usage.output;
totalCacheRead += entry.message.usage.cacheRead;
totalCacheWrite += entry.message.usage.cacheWrite;
totalCost += entry.message.usage.cost.total;
}
}
const usageTotals = this.session.sessionManager.getUsageTotals();
const totalInput = usageTotals.input;
const totalOutput = usageTotals.output;
const totalCacheRead = usageTotals.cacheRead;
const totalCacheWrite = usageTotals.cacheWrite;
const totalCost = usageTotals.cost;
// Calculate context usage from session (handles compaction correctly).
// After compaction, tokens are unknown until the next LLM response.

View file

@ -5,6 +5,7 @@ import { fuzzyFind } from "@gsd/native/fd";
import { fuzzyFilter } from "./fuzzy.js";
const PATH_DELIMITERS = new Set([" ", "\t", '"', "'", "="]);
const FUZZY_FILE_MAX_RESULTS = 20;
function findLastDelimiter(text: string): number {
for (let i = text.length - 1; i >= 0; i -= 1) {
@ -562,15 +563,12 @@ export class CombinedAutocompleteProvider implements AutocompleteProvider {
path: searchPath,
hidden: true,
gitignore: true,
maxResults: 100,
maxResults: FUZZY_FILE_MAX_RESULTS,
});
// Take top 20 matches (already sorted by score descending from native module)
const topMatches = result.matches.slice(0, 20);
// Build suggestions
const suggestions: AutocompleteItem[] = [];
for (const { path: entryPath, isDirectory } of topMatches) {
for (const { path: entryPath, isDirectory } of result.matches) {
// Native module includes trailing / for directories
const pathWithoutSlash = isDirectory ? entryPath.slice(0, -1) : entryPath;
const displayPath = scopedQuery

View file

@ -104,6 +104,12 @@ interface LayoutLine {
cursorPos?: number;
}
/** One wrapped (visual) segment of a logical editor line. */
interface VisualLine {
  /** Index of the logical (unwrapped) line this segment comes from. */
  logicalLine: number;
  /** Starting column of this segment within the logical line. */
  startCol: number;
  /** Number of characters in this visual segment. */
  length: number;
}
export interface EditorTheme {
borderColor: (str: string) => string;
selectList: SelectListTheme;
@ -168,6 +174,10 @@ export class Editor implements Component, Focusable {
// Undo support
private undoStack = new UndoStack<EditorState>();
private textVersion = 0;
private cachedText: string | null = null;
private layoutCache: { width: number; textVersion: number; lines: LayoutLine[] } | null = null;
private visualLineMapCache: { width: number; textVersion: number; lines: VisualLine[] } | null = null;
public onSubmit?: (text: string) => void;
public onChange?: (text: string) => void;
@ -211,6 +221,31 @@ export class Editor implements Component, Focusable {
this.autocompleteProvider = provider;
}
// Drop both width/version-keyed layout caches; they are rebuilt lazily on
// the next render or cursor mapping.
private clearLayoutCaches(): void {
  this.layoutCache = null;
  this.visualLineMapCache = null;
}
/**
 * Central funnel for every text mutation: bump the text version stamp,
 * invalidate the cached text and layout caches, then notify the listener.
 */
private emitChange(): void {
  this.textVersion += 1;
  this.cachedText = null;
  this.clearLayoutCaches();
  this.onChange?.(this.getText());
}
/**
 * Memoized wrapper around `layoutText`: reuse the cached layout when both
 * the wrap width and the text version match; otherwise recompute and store.
 */
private getLayoutLines(width: number): LayoutLine[] {
  const memo = this.layoutCache;
  if (memo === null || memo.width !== width || memo.textVersion !== this.textVersion) {
    const lines = this.layoutText(width);
    this.layoutCache = { width, textVersion: this.textVersion, lines };
    return lines;
  }
  return memo.lines;
}
/**
* Add a prompt to history for up/down arrow navigation.
* Called after successful submission.
@ -273,14 +308,11 @@ export class Editor implements Component, Focusable {
this.setCursorCol(this.state.lines[this.state.cursorLine]?.length || 0);
// Reset scroll - render() will adjust to show cursor
this.scrollOffset = 0;
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
invalidate(): void {
// No cached state to invalidate currently
this.clearLayoutCaches();
}
render(width: number): string[] {
@ -298,7 +330,7 @@ export class Editor implements Component, Focusable {
const horizontal = this.borderColor("─");
// Layout the text
const layoutLines = this.layoutText(layoutWidth);
const layoutLines = this.getLayoutLines(layoutWidth);
// Calculate max visible lines: 30% of terminal height, minimum 5 lines
const terminalRows = this.tui.terminal.rows;
@ -494,7 +526,7 @@ export class Editor implements Component, Focusable {
this.state.cursorLine = result.cursorLine;
this.setCursorCol(result.cursorCol);
this.cancelAutocomplete();
if (this.onChange) this.onChange(this.getText());
this.emitChange();
if (shouldChainSlashArgumentAutocomplete && this.isBareCompletedSlashCommandAtCursor()) {
this.tryTriggerAutocomplete();
@ -524,7 +556,7 @@ export class Editor implements Component, Focusable {
// Fall through to submit
} else {
this.cancelAutocomplete();
if (this.onChange) this.onChange(this.getText());
this.emitChange();
return;
}
}
@ -787,7 +819,10 @@ export class Editor implements Component, Focusable {
}
getText(): string {
return this.state.lines.join("\n");
if (this.cachedText === null) {
this.cachedText = this.state.lines.join("\n");
}
return this.cachedText;
}
/**
@ -877,9 +912,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol((insertedLines[insertedLines.length - 1] || "").length);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
// All the editor methods from before...
@ -906,9 +939,7 @@ export class Editor implements Component, Focusable {
this.state.lines[this.state.cursorLine] = before + char + after;
this.setCursorCol(this.state.cursorCol + char.length);
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
// Check if we should trigger or update autocomplete
if (!this.autocompleteState) {
@ -1021,9 +1052,7 @@ export class Editor implements Component, Focusable {
this.state.cursorLine++;
this.setCursorCol(0);
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private shouldSubmitOnBackslashEnter(data: string, kb: ReturnType<typeof getEditorKeybindings>): boolean {
@ -1052,7 +1081,7 @@ export class Editor implements Component, Focusable {
this.undoStack.clear();
this.lastAction = null;
if (this.onChange) this.onChange("");
this.emitChange();
if (this.onSubmit) this.onSubmit(result);
}
@ -1091,9 +1120,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol(previousLine.length);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
// Update or re-trigger autocomplete after backspace
if (this.autocompleteState) {
@ -1256,9 +1283,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol(previousLine.length);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private deleteToEndOfLine(): void {
@ -1288,9 +1313,7 @@ export class Editor implements Component, Focusable {
this.state.lines.splice(this.state.cursorLine + 1, 1);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private deleteWordBackwards(): void {
@ -1333,9 +1356,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol(deleteFrom);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private deleteWordForward(): void {
@ -1375,9 +1396,7 @@ export class Editor implements Component, Focusable {
currentLine.slice(0, this.state.cursorCol) + currentLine.slice(deleteTo);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private handleForwardDelete(): void {
@ -1409,9 +1428,7 @@ export class Editor implements Component, Focusable {
this.state.lines.splice(this.state.cursorLine + 1, 1);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
// Update or re-trigger autocomplete after forward delete
if (this.autocompleteState) {
@ -1437,8 +1454,13 @@ export class Editor implements Component, Focusable {
* - startCol: starting column in the logical line
* - length: length of this visual line segment
*/
private buildVisualLineMap(width: number): Array<{ logicalLine: number; startCol: number; length: number }> {
const visualLines: Array<{ logicalLine: number; startCol: number; length: number }> = [];
private buildVisualLineMap(width: number): VisualLine[] {
const cached = this.visualLineMapCache;
if (cached && cached.width === width && cached.textVersion === this.textVersion) {
return cached.lines;
}
const visualLines: VisualLine[] = [];
for (let i = 0; i < this.state.lines.length; i++) {
const line = this.state.lines[i] || "";
@ -1461,6 +1483,11 @@ export class Editor implements Component, Focusable {
}
}
this.visualLineMapCache = {
width,
textVersion: this.textVersion,
lines: visualLines,
};
return visualLines;
}
@ -1674,9 +1701,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol((lines[lines.length - 1] || "").length);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
/**
@ -1716,9 +1741,7 @@ export class Editor implements Component, Focusable {
this.setCursorCol(startCol);
}
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
private pushUndoSnapshot(): void {
@ -1732,9 +1755,7 @@ export class Editor implements Component, Focusable {
Object.assign(this.state, snapshot);
this.lastAction = null;
this.preferredVisualCol = null;
if (this.onChange) {
this.onChange(this.getText());
}
this.emitChange();
}
/**
@ -1976,7 +1997,7 @@ https://github.com/EsotericSoftware/spine-runtimes/actions/runs/19536643416/job/
this.state.lines = result.lines;
this.state.cursorLine = result.cursorLine;
this.setCursorCol(result.cursorCol);
if (this.onChange) this.onChange(this.getText());
this.emitChange();
return;
}

View file

@ -9,10 +9,62 @@
*/
import { writeFileSync, mkdirSync, readdirSync, unlinkSync, statSync } from "node:fs";
import { existsSync } from "node:fs";
import { createHash } from "node:crypto";
import { join } from "node:path";
import type { ExtensionContext } from "@gsd/pi-coding-agent";
import { gsdRoot } from "./paths.js";
// Per-activity-directory cache so repeated saves avoid rescanning the
// directory for the next sequence number and can skip rewriting snapshots
// whose content has not changed since the last save.
interface ActivityLogState {
  // Next free 1-based sequence number to use for the activity file name prefix.
  nextSeq: number;
  // Maps "unitType\0safeUnitId" -> the snapshotKey() of the last written
  // snapshot for that unit, used to dedupe identical consecutive saves.
  lastSnapshotKeyByUnit: Map<string, string>;
}

// Keyed by absolute activity directory path; lives for the process lifetime.
const activityLogState = new Map<string, ActivityLogState>();
/**
 * Determine the next activity sequence number by scanning existing files.
 * File names start with `<seq>-`; the result is (highest seq) + 1, or 1
 * when the directory cannot be read (e.g. it does not exist yet).
 */
function scanNextSequence(activityDir: string): number {
  let names: string[];
  try {
    names = readdirSync(activityDir);
  } catch {
    // Missing/unreadable directory: numbering starts fresh at 1.
    return 1;
  }
  const highest = names.reduce((acc, name) => {
    const prefix = /^(\d+)-/.exec(name);
    return prefix ? Math.max(acc, parseInt(prefix[1], 10)) : acc;
  }, 0);
  return highest + 1;
}
/**
 * Fetch (or lazily create) the cached state for one activity directory.
 * The first access scans the directory once to seed the sequence counter;
 * later saves reuse the cached counter instead of re-reading the directory.
 */
function getActivityState(activityDir: string): ActivityLogState {
  const existing = activityLogState.get(activityDir);
  if (existing) {
    return existing;
  }
  const fresh: ActivityLogState = {
    nextSeq: scanNextSequence(activityDir),
    lastSnapshotKeyByUnit: new Map(),
  };
  activityLogState.set(activityDir, fresh);
  return fresh;
}
/**
 * Build a dedupe key for a snapshot: the unit identity plus a SHA-1 digest
 * of the serialized content, NUL-separated so components cannot collide.
 */
function snapshotKey(unitType: string, unitId: string, content: string): string {
  const contentDigest = createHash("sha1").update(content).digest("hex");
  return [unitType, unitId, contentDigest].join("\0");
}
/**
 * Resolve the path for the next activity snapshot file.
 * Normally the cached `state.nextSeq` slot is free. If another writer has
 * already created a file at exactly that sequence, re-derive the counter
 * from disk and retry until an unused path is found.
 */
function nextActivityFilePath(
  activityDir: string,
  state: ActivityLogState,
  unitType: string,
  safeUnitId: string,
): string {
  for (;;) {
    const padded = String(state.nextSeq).padStart(3, "0");
    const candidate = join(activityDir, `${padded}-${unitType}-${safeUnitId}.jsonl`);
    if (!existsSync(candidate)) {
      return candidate;
    }
    // Collision: an external writer took this sequence; rescan and retry.
    state.nextSeq = scanNextSequence(activityDir);
  }
}
export function saveActivityLog(
ctx: ExtensionContext,
basePath: string,
@ -26,22 +78,17 @@ export function saveActivityLog(
const activityDir = join(gsdRoot(basePath), "activity");
mkdirSync(activityDir, { recursive: true });
// Next sequence number
let maxSeq = 0;
try {
for (const f of readdirSync(activityDir)) {
const match = f.match(/^(\d+)-/);
if (match) maxSeq = Math.max(maxSeq, parseInt(match[1], 10));
}
} catch { /* empty dir */ }
const seq = String(maxSeq + 1).padStart(3, "0");
const safeUnitId = unitId.replace(/\//g, "-");
const fileName = `${seq}-${unitType}-${safeUnitId}.jsonl`;
const filePath = join(activityDir, fileName);
const content = `${entries.map(entry => JSON.stringify(entry)).join("\n")}\n`;
const state = getActivityState(activityDir);
const unitKey = `${unitType}\0${safeUnitId}`;
const key = snapshotKey(unitType, safeUnitId, content);
if (state.lastSnapshotKeyByUnit.get(unitKey) === key) return;
const lines = entries.map(entry => JSON.stringify(entry));
writeFileSync(filePath, lines.join("\n") + "\n", "utf-8");
const filePath = nextActivityFilePath(activityDir, state, unitType, safeUnitId);
writeFileSync(filePath, content, "utf-8");
state.nextSeq += 1;
state.lastSnapshotKeyByUnit.set(unitKey, key);
} catch {
// Don't let logging failures break auto-mode
}

View file

@ -87,6 +87,8 @@ export class GSDDashboardOverlay {
private dashData: AutoDashboardData;
private milestoneData: MilestoneView | null = null;
private loading = true;
private loadedDashboardIdentity?: string;
private refreshInFlight: Promise<void> | null = null;
constructor(
tui: { requestRender: () => void },
@ -98,28 +100,67 @@ export class GSDDashboardOverlay {
this.onClose = onClose;
this.dashData = getAutoDashboardData();
this.loadData().then(() => {
this.loading = false;
this.invalidate();
this.tui.requestRender();
});
this.scheduleRefresh(true);
this.refreshTimer = setInterval(() => {
this.dashData = getAutoDashboardData();
this.loadData().then(() => {
this.invalidate();
this.tui.requestRender();
});
this.scheduleRefresh();
}, 2000);
}
private async loadData(): Promise<void> {
private scheduleRefresh(initial = false): void {
if (this.refreshInFlight) return;
this.refreshInFlight = this.refreshDashboard(initial)
.finally(() => {
this.refreshInFlight = null;
});
}
/**
 * Compress the dashboard data into a small identity string; the expensive
 * loadData() pass is re-run only when this identity changes between polls.
 * Components: basePath | active | paused | current unit | completed units.
 */
private computeDashboardIdentity(dashData: AutoDashboardData): string {
  const parts: string[] = [dashData.basePath || process.cwd()];
  parts.push(dashData.active ? "1" : "0");
  parts.push(dashData.paused ? "1" : "0");
  if (dashData.currentUnit) {
    const unit = dashData.currentUnit;
    parts.push(`${unit.type}:${unit.id}:${unit.startedAt}`);
  } else {
    parts.push("-");
  }
  const completed = dashData.completedUnits;
  if (completed.length > 0) {
    const last = completed[completed.length - 1];
    parts.push(`${completed.length}:${last.type}:${last.id}:${last.finishedAt}`);
  } else {
    parts.push("0");
  }
  return parts.join("|");
}
/**
 * Poll the auto-mode dashboard state and re-render. The expensive
 * loadData() pass only runs when the computed dashboard identity changed
 * (or on the very first call); a failed load leaves the recorded identity
 * stale so the next 2s tick retries.
 */
private async refreshDashboard(initial = false): Promise<void> {
  this.dashData = getAutoDashboardData();
  const nextIdentity = this.computeDashboardIdentity(this.dashData);
  if (initial || nextIdentity !== this.loadedDashboardIdentity) {
    const loaded = await this.loadData();
    // Only record the identity on success so a failed load is retried.
    if (loaded) {
      this.loadedDashboardIdentity = nextIdentity;
    }
  }
  if (initial) {
    // First load finished: stop showing the loading placeholder.
    this.loading = false;
  }
  this.invalidate();
  this.tui.requestRender();
}
private async loadData(): Promise<boolean> {
const base = this.dashData.basePath || process.cwd();
try {
const state = await deriveState(base);
if (!state.activeMilestone) {
this.milestoneData = null;
return;
return true;
}
const mid = state.activeMilestone.id;
@ -175,8 +216,10 @@ export class GSDDashboardOverlay {
}
this.milestoneData = view;
return true;
} catch {
// Don't crash the overlay
return false;
}
}

View file

@ -0,0 +1,127 @@
// Tests for saveActivityLog performance behavior:
// - cache next sequence per activity directory instead of rescanning every save
// - skip rewriting identical snapshots for the same unit
// - recover safely if another writer creates the cached next sequence
import { existsSync, mkdtempSync, readdirSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { saveActivityLog } from "../activity-log.ts";
import { createTestContext } from "./test-helpers.ts";
// Shared assertion helpers and the final pass/fail reporter for this suite.
const { assertEq, assertTrue, report } = createTestContext();

// Temp dirs created by createBaseDir(); removed by cleanup() on process exit.
let tmpDirs: string[] = [];
// Create a fresh temp base dir and register it for removal on exit.
function createBaseDir(): string {
  const baseDir = mkdtempSync(join(tmpdir(), "gsd-activity-save-test-"));
  tmpDirs.push(baseDir);
  return baseDir;
}
// Path of the `.gsd/activity` directory beneath a test base dir.
function activityDir(baseDir: string): string {
  const segments = [baseDir, ".gsd", "activity"];
  return join(...segments);
}
// Sorted activity file names for a base dir ([] when none were written yet).
function listActivityFiles(baseDir: string): string[] {
  const dir = activityDir(baseDir);
  if (!existsSync(dir)) {
    return [];
  }
  return readdirSync(dir).sort();
}
// Minimal ExtensionContext stand-in whose session manager yields `entries`.
function createCtx(entries: unknown[]) {
  const sessionManager = { getEntries: () => entries };
  return { sessionManager };
}
// Remove every temp dir registered by createBaseDir, leaving the list empty.
function cleanup(): void {
  while (tmpDirs.length > 0) {
    const dir = tmpDirs.pop()!;
    rmSync(dir, { recursive: true, force: true });
  }
}

process.on("exit", cleanup);
// Drive the four scenarios in order; each uses an isolated temp base dir so
// the module-level per-directory cache in activity-log.ts starts cold.
async function main(): Promise<void> {
  console.log("\n── (a) cache next sequence instead of rescanning every save");
  {
    const baseDir = createBaseDir();
    saveActivityLog(createCtx([{ kind: "first", n: 1 }]) as any, baseDir, "execute-task", "M001/S01/T01");
    // Simulate an external writer adding a high-sequence file after the
    // first save seeded the cache; a rescan would jump to 1000.
    writeFileSync(
      join(activityDir(baseDir), "999-external-manual.jsonl"),
      '{"external":true}\n',
      "utf-8",
    );
    saveActivityLog(createCtx([{ kind: "second", n: 2 }]) as any, baseDir, "execute-task", "M001/S01/T02");
    const files = listActivityFiles(baseDir);
    assertTrue(files.includes("001-execute-task-M001-S01-T01.jsonl"), "(a) first save uses sequence 001");
    assertTrue(files.includes("002-execute-task-M001-S01-T02.jsonl"), "(a) second save uses cached next sequence 002");
    assertTrue(files.includes("999-external-manual.jsonl"), "(a) externally added file remains present");
    assertTrue(!files.some(file => file.startsWith("1000-")), "(a) second save did not rescan and jump to sequence 1000");
  }

  console.log("\n── (b) skip rewriting identical snapshots for the same unit");
  {
    const baseDir = createBaseDir();
    const ctx = createCtx([{ role: "assistant", content: "same snapshot" }]);
    // Same ctx twice: identical serialized content must be deduped.
    saveActivityLog(ctx as any, baseDir, "plan-slice", "M002/S01");
    saveActivityLog(ctx as any, baseDir, "plan-slice", "M002/S01");
    let files = listActivityFiles(baseDir);
    assertEq(files.length, 1, "(b) identical repeated save writes only one activity file");
    assertTrue(files[0] === "001-plan-slice-M002-S01.jsonl", "(b) the original sequence is preserved");
    saveActivityLog(createCtx([{ role: "assistant", content: "changed snapshot" }]) as any, baseDir, "plan-slice", "M002/S01");
    files = listActivityFiles(baseDir);
    assertEq(files.length, 2, "(b) changed snapshot writes a new activity file");
    assertTrue(files.includes("002-plan-slice-M002-S01.jsonl"), "(b) deduped save did not consume the next sequence");
  }

  console.log("\n── (c) recover if another writer creates the exact cached target file");
  {
    const baseDir = createBaseDir();
    saveActivityLog(createCtx([{ turn: 1 }]) as any, baseDir, "execute-task", "M003/S02/T01");
    // Occupy the exact file path the cached counter would pick next.
    writeFileSync(
      join(activityDir(baseDir), "002-execute-task-M003-S02-T02.jsonl"),
      '{"collision":true}\n',
      "utf-8",
    );
    saveActivityLog(createCtx([{ turn: 2 }]) as any, baseDir, "execute-task", "M003/S02/T02");
    const files = listActivityFiles(baseDir);
    assertTrue(files.includes("002-execute-task-M003-S02-T02.jsonl"), "(c) exact collision file is preserved");
    assertTrue(files.includes("003-execute-task-M003-S02-T02.jsonl"), "(c) logger rescans only on collision and advances to 003");
  }

  console.log("\n── (d) dedupe is tracked per unit, not just the last write in the directory");
  {
    const baseDir = createBaseDir();
    const repeatedCtx = createCtx([{ role: "assistant", content: "same-for-unit-a" }]);
    // Interleave a save for a different unit between two identical saves
    // for unit A; per-unit tracking must still dedupe the second A save.
    saveActivityLog(repeatedCtx as any, baseDir, "execute-task", "M004/S01/T01");
    saveActivityLog(createCtx([{ role: "assistant", content: "other-unit" }]) as any, baseDir, "execute-task", "M004/S01/T02");
    saveActivityLog(repeatedCtx as any, baseDir, "execute-task", "M004/S01/T01");
    const files = listActivityFiles(baseDir);
    assertEq(files.length, 2, "(d) interleaving another unit does not force a duplicate rewrite for unit A");
    assertTrue(files.includes("001-execute-task-M004-S01-T01.jsonl"), "(d) original unit A snapshot is retained");
    assertTrue(files.includes("002-execute-task-M004-S01-T02.jsonl"), "(d) unit B snapshot is retained");
  }

  report();
}
// Run the suite; any uncaught failure is reported and exits non-zero.
main().catch((error) => {
  console.error(error);
  process.exit(1);
});