refactor: centralize magic numbers into constants.ts (#1044)
Extracts 11 hardcoded timeout, retry, compaction, and tool-default values from 9 source files into a single constants.ts module. Each source file now imports from the central definition, eliminating duplicated literals and making tuning a single-file change. Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
6a2a6a9e2c
commit
27e79f76b3
10 changed files with 89 additions and 18 deletions
|
|
@ -20,6 +20,7 @@ import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "f
|
|||
import { dirname, join } from "path";
|
||||
import lockfile from "proper-lockfile";
|
||||
import { getAgentDir } from "../config.js";
|
||||
import { AUTH_LOCK_STALE_MS } from "./constants.js";
|
||||
import { resolveConfigValue } from "./resolve-config-value.js";
|
||||
|
||||
export type ApiKeyCredential = {
|
||||
|
|
@ -136,7 +137,7 @@ export class FileAuthStorageBackend implements AuthStorageBackend {
|
|||
maxTimeout: 10000,
|
||||
randomize: true,
|
||||
},
|
||||
stale: 30000,
|
||||
stale: AUTH_LOCK_STALE_MS,
|
||||
onCompromised: (err) => {
|
||||
lockCompromised = true;
|
||||
lockCompromisedError = err;
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
import type { AgentMessage } from "@gsd/pi-agent-core";
|
||||
import type { Model } from "@gsd/pi-ai";
|
||||
import { completeSimple } from "@gsd/pi-ai";
|
||||
import { COMPACTION_RESERVE_TOKENS } from "../constants.js";
|
||||
import {
|
||||
convertToLlm,
|
||||
createBranchSummaryMessage,
|
||||
|
|
@ -281,7 +282,7 @@ export async function generateBranchSummary(
|
|||
entries: SessionEntry[],
|
||||
options: GenerateBranchSummaryOptions,
|
||||
): Promise<BranchSummaryResult> {
|
||||
const { model, apiKey, signal, customInstructions, replaceInstructions, reserveTokens = 16384 } = options;
|
||||
const { model, apiKey, signal, customInstructions, replaceInstructions, reserveTokens = COMPACTION_RESERVE_TOKENS } = options;
|
||||
|
||||
// Token budget = context window minus reserved space for prompt + response
|
||||
const contextWindow = model.contextWindow || 128000;
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
import type { AgentMessage } from "@gsd/pi-agent-core";
|
||||
import type { AssistantMessage, Model, Usage } from "@gsd/pi-ai";
|
||||
import { completeSimple } from "@gsd/pi-ai";
|
||||
import { COMPACTION_KEEP_RECENT_TOKENS, COMPACTION_RESERVE_TOKENS } from "../constants.js";
|
||||
import {
|
||||
convertToLlm,
|
||||
createBranchSummaryMessage,
|
||||
|
|
@ -113,8 +114,8 @@ export interface CompactionSettings {
|
|||
|
||||
export const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {
|
||||
enabled: true,
|
||||
reserveTokens: 16384,
|
||||
keepRecentTokens: 20000,
|
||||
reserveTokens: COMPACTION_RESERVE_TOKENS,
|
||||
keepRecentTokens: COMPACTION_KEEP_RECENT_TOKENS,
|
||||
};
|
||||
|
||||
// ============================================================================
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
import type { AgentMessage } from "@gsd/pi-agent-core";
|
||||
import type { Message } from "@gsd/pi-ai";
|
||||
import { TOOL_RESULT_MAX_CHARS } from "../constants.js";
|
||||
|
||||
// ============================================================================
|
||||
// File Operation Tracking
|
||||
|
|
@ -85,8 +86,7 @@ export function formatFileOperations(readFiles: string[], modifiedFiles: string[
|
|||
// Message Serialization
|
||||
// ============================================================================
|
||||
|
||||
/** Maximum characters for a tool result in serialized summaries. */
|
||||
const TOOL_RESULT_MAX_CHARS = 2000;
|
||||
// TOOL_RESULT_MAX_CHARS imported from ../constants.js
|
||||
|
||||
/**
|
||||
* Truncate text to a maximum character length for summarization.
|
||||
|
|
|
|||
59
packages/pi-coding-agent/src/core/constants.ts
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
/**
|
||||
* Centralized configuration constants for the coding agent.
|
||||
*
|
||||
* Values grouped by subsystem. Each constant documents where it is consumed
|
||||
* so that changes can be audited in one place.
|
||||
*/
|
||||
|
||||
// =============================================================================
|
||||
// Timeouts
|
||||
// =============================================================================
|
||||
|
||||
/** Shell command execution timeout used by resolve-config-value. */
|
||||
export const COMMAND_EXECUTION_TIMEOUT_MS = 10_000;
|
||||
|
||||
/** LSP server liveness check timeout (lspmux). */
|
||||
export const LSP_LIVENESS_TIMEOUT_MS = 1_000;
|
||||
|
||||
/** Staleness threshold for the async auth-storage file lock. */
|
||||
export const AUTH_LOCK_STALE_MS = 30_000;
|
||||
|
||||
// =============================================================================
|
||||
// Caches
|
||||
// =============================================================================
|
||||
|
||||
/** TTL for the cached lspmux state detection result. */
|
||||
export const LSP_STATE_CACHE_TTL_MS = 5 * 60 * 1_000;
|
||||
|
||||
// =============================================================================
|
||||
// Compaction & Summarization
|
||||
// =============================================================================
|
||||
|
||||
/** Tokens reserved for the LLM prompt + response during compaction and branch summarization. */
|
||||
export const COMPACTION_RESERVE_TOKENS = 16_384;
|
||||
|
||||
/** Tokens from the tail of the conversation kept verbatim after compaction. */
|
||||
export const COMPACTION_KEEP_RECENT_TOKENS = 20_000;
|
||||
|
||||
/** Max characters kept per tool-result block when serializing for summarization. */
|
||||
export const TOOL_RESULT_MAX_CHARS = 2_000;
|
||||
|
||||
// =============================================================================
|
||||
// Retry
|
||||
// =============================================================================
|
||||
|
||||
/** Base delay for exponential back-off retries (2 s, 4 s, 8 s ...). */
|
||||
export const RETRY_BASE_DELAY_MS = 2_000;
|
||||
|
||||
/** Maximum server-requested delay before the retry loop gives up. */
|
||||
export const RETRY_MAX_DELAY_MS = 300_000;
|
||||
|
||||
// =============================================================================
|
||||
// Tool Defaults
|
||||
// =============================================================================
|
||||
|
||||
/** Default result-count cap for the find/glob tool. */
|
||||
export const FIND_DEFAULT_LIMIT = 1_000;
|
||||
|
||||
/** Default line-count cap for tool-output truncation. */
|
||||
export const TRUNCATE_DEFAULT_MAX_LINES = 2_000;
|
||||
|
|
@ -2,6 +2,7 @@ import { execSync, spawn } from "node:child_process";
|
|||
import * as fsPromises from "node:fs/promises";
|
||||
import * as os from "node:os";
|
||||
import * as path from "node:path";
|
||||
import { LSP_LIVENESS_TIMEOUT_MS, LSP_STATE_CACHE_TTL_MS } from "../constants.js";
|
||||
|
||||
/**
|
||||
* lspmux integration for LSP server multiplexing.
|
||||
|
|
@ -41,8 +42,6 @@ const DEFAULT_SUPPORTED_SERVERS = new Set([
|
|||
"rust-analyzer",
|
||||
]);
|
||||
|
||||
const LIVENESS_TIMEOUT_MS = 1000;
|
||||
const STATE_CACHE_TTL_MS = 5 * 60 * 1000;
|
||||
|
||||
// =============================================================================
|
||||
// Helpers
|
||||
|
|
@ -108,7 +107,7 @@ async function checkServerRunning(binaryPath: string): Promise<boolean> {
|
|||
new Promise<number>((resolve) => {
|
||||
proc.on("exit", (code: number | null) => resolve(code ?? 1));
|
||||
}),
|
||||
new Promise<null>(resolve => setTimeout(() => resolve(null), LIVENESS_TIMEOUT_MS)),
|
||||
new Promise<null>(resolve => setTimeout(() => resolve(null), LSP_LIVENESS_TIMEOUT_MS)),
|
||||
]);
|
||||
|
||||
if (exited === null) {
|
||||
|
|
@ -124,7 +123,7 @@ async function checkServerRunning(binaryPath: string): Promise<boolean> {
|
|||
|
||||
export async function detectLspmux(): Promise<LspmuxState> {
|
||||
const now = Date.now();
|
||||
if (cachedState && now - cacheTimestamp < STATE_CACHE_TTL_MS) {
|
||||
if (cachedState && now - cacheTimestamp < LSP_STATE_CACHE_TTL_MS) {
|
||||
return cachedState;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
*/
|
||||
|
||||
import { execFileSync } from "child_process";
|
||||
import { COMMAND_EXECUTION_TIMEOUT_MS } from "./constants.js";
|
||||
|
||||
const SHELL_OPERATORS = /[;|&`$><]/;
|
||||
|
||||
|
|
@ -60,7 +61,7 @@ function executeCommand(commandConfig: string): string | undefined {
|
|||
try {
|
||||
const output = execFileSync(firstToken, tokens.slice(1), {
|
||||
encoding: "utf-8",
|
||||
timeout: 10000,
|
||||
timeout: COMMAND_EXECUTION_TIMEOUT_MS,
|
||||
stdio: ["ignore", "pipe", "ignore"],
|
||||
});
|
||||
result = output.trim() || undefined;
|
||||
|
|
|
|||
|
|
@ -3,6 +3,12 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|||
import { dirname, join } from "path";
|
||||
import lockfile from "proper-lockfile";
|
||||
import { CONFIG_DIR_NAME, getAgentDir } from "../config.js";
|
||||
import {
|
||||
COMPACTION_KEEP_RECENT_TOKENS,
|
||||
COMPACTION_RESERVE_TOKENS,
|
||||
RETRY_BASE_DELAY_MS,
|
||||
RETRY_MAX_DELAY_MS,
|
||||
} from "./constants.js";
|
||||
import type { BashInterceptorRule } from "./tools/bash-interceptor.js";
|
||||
|
||||
export interface CompactionSettings {
|
||||
|
|
@ -710,11 +716,11 @@ export class SettingsManager {
|
|||
}
|
||||
|
||||
getCompactionReserveTokens(): number {
|
||||
return this.settings.compaction?.reserveTokens ?? 16384;
|
||||
return this.settings.compaction?.reserveTokens ?? COMPACTION_RESERVE_TOKENS;
|
||||
}
|
||||
|
||||
getCompactionKeepRecentTokens(): number {
|
||||
return this.settings.compaction?.keepRecentTokens ?? 20000;
|
||||
return this.settings.compaction?.keepRecentTokens ?? COMPACTION_KEEP_RECENT_TOKENS;
|
||||
}
|
||||
|
||||
getCompactionSettings(): { enabled: boolean; reserveTokens: number; keepRecentTokens: number } {
|
||||
|
|
@ -727,7 +733,7 @@ export class SettingsManager {
|
|||
|
||||
getBranchSummarySettings(): { reserveTokens: number; skipPrompt: boolean } {
|
||||
return {
|
||||
reserveTokens: this.settings.branchSummary?.reserveTokens ?? 16384,
|
||||
reserveTokens: this.settings.branchSummary?.reserveTokens ?? COMPACTION_RESERVE_TOKENS,
|
||||
skipPrompt: this.settings.branchSummary?.skipPrompt ?? false,
|
||||
};
|
||||
}
|
||||
|
|
@ -753,8 +759,8 @@ export class SettingsManager {
|
|||
return {
|
||||
enabled: this.getRetryEnabled(),
|
||||
maxRetries: this.settings.retry?.maxRetries ?? 3,
|
||||
baseDelayMs: this.settings.retry?.baseDelayMs ?? 2000,
|
||||
maxDelayMs: this.settings.retry?.maxDelayMs ?? 300000,
|
||||
baseDelayMs: this.settings.retry?.baseDelayMs ?? RETRY_BASE_DELAY_MS,
|
||||
maxDelayMs: this.settings.retry?.maxDelayMs ?? RETRY_MAX_DELAY_MS,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import { glob as nativeGlob } from "@gsd/native/glob";
|
|||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { existsSync } from "fs";
|
||||
import path from "path";
|
||||
import { FIND_DEFAULT_LIMIT } from "../constants.js";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
import { DEFAULT_MAX_BYTES, formatSize, type TruncationResult, truncateHead } from "./truncate.js";
|
||||
|
||||
|
|
@ -16,7 +17,7 @@ const findSchema = Type.Object({
|
|||
|
||||
export type FindToolInput = Static<typeof findSchema>;
|
||||
|
||||
const DEFAULT_LIMIT = 1000;
|
||||
const DEFAULT_LIMIT = FIND_DEFAULT_LIMIT;
|
||||
|
||||
export interface FindToolDetails {
|
||||
truncation?: TruncationResult;
|
||||
|
|
|
|||
|
|
@ -8,7 +8,9 @@
|
|||
* Never returns partial lines (except bash tail truncation edge case).
|
||||
*/
|
||||
|
||||
export const DEFAULT_MAX_LINES = 2000;
|
||||
import { TRUNCATE_DEFAULT_MAX_LINES } from "../constants.js";
|
||||
|
||||
export const DEFAULT_MAX_LINES = TRUNCATE_DEFAULT_MAX_LINES;
|
||||
export const DEFAULT_MAX_BYTES = 50 * 1024; // 50KB
|
||||
export const GREP_MAX_LINE_LENGTH = 500; // Max chars per grep match line
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue