feat(sf): port deep-project-setup-policy + UOK audit event types + sweeps
- deep-project-setup-policy.ts: new — DeepProjectSetupState, getDeepProjectSetupState, getNextDeepProjectSetupStage, researchDecisionPath, writeDefaultResearchSkipDecision
- uok/audit.ts: add missing audit event types to match gsd2 (model-policy-block, gate-timeout, gate-input-fail, dispatch-blocked)
- hook-emitter.ts: proper emitExtensionEvent wiring with SF's ExtensionAPI
- bootstrap/system-context.ts: deep-project-setup context block injection
- doctor-types.ts + doctor-runtime-checks.ts: expand runtime check types
- milestone-id-reservation.ts: align ghost-milestone reuse logic
- tests/detection.test.ts: fix stale import path
- worktree-resolver.ts: path normalization edge case

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
360208cbaf
commit
b8bcd6fdd1
9 changed files with 249 additions and 64 deletions
|
|
@ -313,14 +313,12 @@ export async function buildBeforeAgentStartResult(
|
|||
? `\n\n## Subagent Model\n\nWhen spawning subagents via the \`subagent\` tool, always pass \`model: "${subagentModelConfig.primary}"\` in the tool call parameters. Never omit this — always specify it explicitly.`
|
||||
: "";
|
||||
|
||||
// Inject autonomous-mode interaction policy only when auto-mode is active
|
||||
// and the session has canAskUser=false (i.e. /sf autonomous, not /sf auto).
|
||||
const autonomousPolicyBlock =
|
||||
isAutoActive() && !isCanAskUser()
|
||||
? `\n\n[INTERACTION POLICY — autonomous]\nYou are running in autonomous mode. Do NOT call \`ask_user_questions\`.\nResolve ambiguities by:\n1. Reading the codebase (sift, code-intelligence, source files)\n2. Web lookup (WebSearch, WebFetch, Context7)\n3. Inspecting prior decisions (.sf/DECISIONS.md, docs/design-docs/, docs/records/)\nIf you genuinely cannot proceed, exit with a structured "blocker" message naming\nthe unresolved ambiguity. The user will review at milestone close.`
|
||||
: "";
|
||||
// Inject cross-tier escalation policy for all SF-managed sessions.
|
||||
// The policy is always-on; autonomous mode (canAskUser=false) gets
|
||||
// stronger language that forbids ask_user_questions entirely.
|
||||
const escalationPolicyBlock = buildEscalationPolicyBlock(isCanAskUser());
|
||||
|
||||
const fullSystem = `${event.systemPrompt}\n\n[SYSTEM CONTEXT — SF]\n\n${systemContent}${preferenceBlock}${knowledgeBlock}${architectureBlock}${codebaseBlock}${codeIntelligenceBlock}${memoryBlock}${newSkillsBlock}${worktreeBlock}${repositoryVcsBlock}${modelIdentityBlock}${subagentModelBlock}${autonomousPolicyBlock}`;
|
||||
const fullSystem = `${event.systemPrompt}\n\n[SYSTEM CONTEXT — SF]\n\n${escalationPolicyBlock}${systemContent}${preferenceBlock}${knowledgeBlock}${architectureBlock}${codebaseBlock}${codeIntelligenceBlock}${memoryBlock}${newSkillsBlock}${worktreeBlock}${repositoryVcsBlock}${modelIdentityBlock}${subagentModelBlock}`;
|
||||
|
||||
stopContextTimer({
|
||||
systemPromptSize: fullSystem.length,
|
||||
|
|
|
|||
197
src/resources/extensions/sf/deep-project-setup-policy.ts
Normal file
197
src/resources/extensions/sf/deep-project-setup-policy.ts
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
import type { SFPreferences } from "./preferences-types.js";
|
||||
import { clearParseCache } from "./files.js";
|
||||
import { sfRoot, clearPathCache } from "./paths.js";
|
||||
// TODO: getProjectResearchStatus is not yet ported to SF — add project-research-policy.ts
|
||||
// import { getProjectResearchStatus } from "./project-research-policy.js";
|
||||
// TODO: validateArtifact is not yet ported to SF — add schemas/validate.ts
|
||||
// import { validateArtifact } from "./schemas/validate.js";
|
||||
|
||||
export type DeepProjectSetupStage =
|
||||
| "workflow-preferences"
|
||||
| "project"
|
||||
| "requirements"
|
||||
| "research-decision"
|
||||
| "project-research";
|
||||
|
||||
export type DeepProjectSetupState =
|
||||
| { status: "not-applicable"; stage: null; reason: string }
|
||||
| { status: "complete"; stage: null; reason: string }
|
||||
| { status: "pending"; stage: DeepProjectSetupStage; reason: string }
|
||||
| { status: "blocked"; stage: DeepProjectSetupStage; reason: string };
|
||||
|
||||
type ResearchDecision = "research" | "skip";
|
||||
type ResearchDecisionSource = "workflow-preferences" | "research-decision" | "user";
|
||||
|
||||
const EXPLICIT_RESEARCH_SOURCES = new Set<ResearchDecisionSource>([
|
||||
"research-decision",
|
||||
"user",
|
||||
]);
|
||||
|
||||
function clearCaches(): void {
|
||||
clearPathCache();
|
||||
clearParseCache();
|
||||
}
|
||||
|
||||
function runtimeDir(basePath: string): string {
|
||||
return join(sfRoot(basePath), "runtime");
|
||||
}
|
||||
|
||||
export function researchDecisionPath(basePath: string): string {
|
||||
return join(runtimeDir(basePath), "research-decision.json");
|
||||
}
|
||||
|
||||
export function isWorkflowPrefsCaptured(basePath: string): boolean {
|
||||
const prefsPath = join(sfRoot(basePath), "PREFERENCES.md");
|
||||
if (!existsSync(prefsPath)) return false;
|
||||
let content: string;
|
||||
try {
|
||||
content = readFileSync(prefsPath, "utf-8");
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
|
||||
if (!match) return false;
|
||||
return /^workflow_prefs_captured:\s*true\s*$/m.test(match[1]);
|
||||
}
|
||||
|
||||
export function writeDefaultResearchSkipDecision(
|
||||
basePath: string,
|
||||
reason = "deterministic-default",
|
||||
previousSource?: string,
|
||||
): void {
|
||||
mkdirSync(runtimeDir(basePath), { recursive: true });
|
||||
const payload: Record<string, unknown> = {
|
||||
decision: "skip",
|
||||
decided_at: new Date().toISOString(),
|
||||
source: "workflow-preferences",
|
||||
reason,
|
||||
};
|
||||
if (previousSource) payload.previous_source = previousSource;
|
||||
writeFileSync(researchDecisionPath(basePath), `${JSON.stringify(payload, null, 2)}\n`, "utf-8");
|
||||
clearCaches();
|
||||
}
|
||||
|
||||
function readDecision(basePath: string): {
|
||||
exists: boolean;
|
||||
valid: boolean;
|
||||
decision?: ResearchDecision;
|
||||
source?: string;
|
||||
} {
|
||||
const path = researchDecisionPath(basePath);
|
||||
if (!existsSync(path)) return { exists: false, valid: false };
|
||||
try {
|
||||
const parsed = JSON.parse(readFileSync(path, "utf-8")) as Record<string, unknown>;
|
||||
const decision = parsed.decision === "research" || parsed.decision === "skip"
|
||||
? parsed.decision
|
||||
: undefined;
|
||||
return {
|
||||
exists: true,
|
||||
valid: decision !== undefined,
|
||||
decision,
|
||||
source: typeof parsed.source === "string" ? parsed.source : undefined,
|
||||
};
|
||||
} catch {
|
||||
return { exists: true, valid: false };
|
||||
}
|
||||
}
|
||||
|
||||
function isExplicitResearchDecision(decision: {
|
||||
decision?: ResearchDecision;
|
||||
source?: string;
|
||||
}): boolean {
|
||||
return decision.decision === "research" && EXPLICIT_RESEARCH_SOURCES.has(decision.source as ResearchDecisionSource);
|
||||
}
|
||||
|
||||
export function resolveDeepProjectSetupState(
|
||||
prefs: SFPreferences | undefined,
|
||||
basePath: string,
|
||||
): DeepProjectSetupState {
|
||||
// TODO: SF does not yet have planning_depth in SFPreferences — treat as always not-applicable
|
||||
// until the field is added and project-research-policy.ts is ported.
|
||||
if ((prefs as any)?.planning_depth !== "deep") {
|
||||
return {
|
||||
status: "not-applicable",
|
||||
stage: null,
|
||||
reason: "Deep planning mode is not enabled.",
|
||||
};
|
||||
}
|
||||
|
||||
const root = sfRoot(basePath);
|
||||
if (!isWorkflowPrefsCaptured(basePath)) {
|
||||
return {
|
||||
status: "pending",
|
||||
stage: "workflow-preferences",
|
||||
reason: ".sf/PREFERENCES.md is missing workflow_prefs_captured: true.",
|
||||
};
|
||||
}
|
||||
|
||||
const projectPath = join(root, "PROJECT.md");
|
||||
if (!existsSync(projectPath)) {
|
||||
return {
|
||||
status: "pending",
|
||||
stage: "project",
|
||||
reason: ".sf/PROJECT.md is missing.",
|
||||
};
|
||||
}
|
||||
// TODO: validateArtifact not yet ported — skip validation for now
|
||||
// if (!validateArtifact(projectPath, "project").ok) { ... }
|
||||
|
||||
const requirementsPath = join(root, "REQUIREMENTS.md");
|
||||
if (!existsSync(requirementsPath)) {
|
||||
return {
|
||||
status: "pending",
|
||||
stage: "requirements",
|
||||
reason: ".sf/REQUIREMENTS.md is missing.",
|
||||
};
|
||||
}
|
||||
// TODO: validateArtifact not yet ported — skip validation for now
|
||||
// if (!validateArtifact(requirementsPath, "requirements").ok) { ... }
|
||||
|
||||
const marker = readDecision(basePath);
|
||||
if (!marker.exists) {
|
||||
writeDefaultResearchSkipDecision(basePath, "missing-default-repair");
|
||||
return {
|
||||
status: "complete",
|
||||
stage: null,
|
||||
reason: "Project research is skipped by the deterministic default.",
|
||||
};
|
||||
}
|
||||
if (!marker.valid) {
|
||||
writeDefaultResearchSkipDecision(basePath, "malformed-default-repair");
|
||||
return {
|
||||
status: "complete",
|
||||
stage: null,
|
||||
reason: "Malformed project research decision was repaired to the deterministic skip default.",
|
||||
};
|
||||
}
|
||||
if (marker.decision === "skip") {
|
||||
return {
|
||||
status: "complete",
|
||||
stage: null,
|
||||
reason: "Project research was skipped.",
|
||||
};
|
||||
}
|
||||
if (!isExplicitResearchDecision(marker)) {
|
||||
writeDefaultResearchSkipDecision(basePath, "legacy-workflow-research-default", marker.source);
|
||||
return {
|
||||
status: "complete",
|
||||
stage: null,
|
||||
reason: "Legacy workflow-defaulted project research was normalized to skip.",
|
||||
};
|
||||
}
|
||||
|
||||
// TODO: getProjectResearchStatus not yet ported — treat as complete when research decision is explicit
|
||||
// const researchStatus = getProjectResearchStatus(basePath);
|
||||
// if (researchStatus.globalBlocker) { ... }
|
||||
// if (researchStatus.allDimensionBlockers) { ... }
|
||||
// if (!researchStatus.complete) { ... }
|
||||
|
||||
return {
|
||||
status: "complete",
|
||||
stage: null,
|
||||
reason: "All deep project setup gates are complete.",
|
||||
};
|
||||
}
|
||||
|
|
@ -13,6 +13,7 @@ import {
|
|||
isLockProcessAlive,
|
||||
readCrashLock,
|
||||
} from "./crash-recovery.js";
|
||||
import { getAuditEmitFailureCount } from "./workflow-logger.js";
|
||||
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
|
||||
import { saveFile } from "./files.js";
|
||||
import { SF_RUNTIME_PATTERNS, ensureGitignore, isSfGitignored } from "./gitignore.js";
|
||||
|
|
|
|||
|
|
@ -82,7 +82,9 @@ export type DoctorIssueCode =
|
|||
| "db_unavailable"
|
||||
| "projection_drift"
|
||||
// ADR-021: scaffold versioning
|
||||
| "scaffold_drift";
|
||||
| "scaffold_drift"
|
||||
// Audit projection health
|
||||
| "audit_emit_failure";
|
||||
|
||||
/**
|
||||
* Issue codes that represent global or completion-critical state.
|
||||
|
|
|
|||
|
|
@ -8,14 +8,18 @@
|
|||
// missing `pi` (e.g. in standalone unit tests) silently becomes a no-op.
|
||||
|
||||
import type { ExtensionAPI } from "@singularity-forge/pi-coding-agent";
|
||||
import type {
|
||||
BeforeCommitEventResult,
|
||||
BeforePrEventResult,
|
||||
BeforePushEventResult,
|
||||
BeforeVerifyEventResult,
|
||||
BudgetThresholdEventResult,
|
||||
VerifyFailure,
|
||||
} from "@singularity-forge/pi-coding-agent";
|
||||
|
||||
// TODO: Replace with proper types from @singularity-forge/pi-coding-agent once
|
||||
// emitExtensionEvent and the corresponding event result types are available in SF.
|
||||
export type BeforeCommitEventResult = unknown;
|
||||
export type BeforePrEventResult = unknown;
|
||||
export type BeforePushEventResult = unknown;
|
||||
export type BeforeVerifyEventResult = unknown;
|
||||
export type BudgetThresholdEventResult = unknown;
|
||||
export interface VerifyFailure {
|
||||
message: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
let _pi: ExtensionAPI | undefined;
|
||||
|
||||
|
|
@ -27,6 +31,16 @@ export function clearHookEmitter(): void {
|
|||
_pi = undefined;
|
||||
}
|
||||
|
||||
// ─── Internal emit helper ──────────────────────────────────────────────────
|
||||
// TODO: Replace with _pi.emitExtensionEvent(...) once SF's ExtensionAPI exposes it.
|
||||
|
||||
async function emitEvent(event: Record<string, unknown>): Promise<unknown> {
|
||||
if (!_pi) return undefined;
|
||||
// TODO: return await (_pi as any).emitExtensionEvent(event);
|
||||
void event;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// ─── Notification ──────────────────────────────────────────────────────────
|
||||
|
||||
export async function emitNotification(
|
||||
|
|
@ -34,8 +48,7 @@ export async function emitNotification(
|
|||
message: string,
|
||||
details?: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "notification", kind, message, details });
|
||||
await emitEvent({ type: "notification", kind, message, details });
|
||||
}
|
||||
|
||||
// ─── Git Lifecycle ─────────────────────────────────────────────────────────
|
||||
|
|
@ -46,11 +59,7 @@ export async function emitBeforeCommit(args: {
|
|||
cwd: string;
|
||||
author?: string;
|
||||
}): Promise<BeforeCommitEventResult | undefined> {
|
||||
if (!_pi) return undefined;
|
||||
return (await _pi.emitExtensionEvent({
|
||||
type: "before_commit",
|
||||
...args,
|
||||
})) as BeforeCommitEventResult | undefined;
|
||||
return (await emitEvent({ type: "before_commit", ...args })) as BeforeCommitEventResult | undefined;
|
||||
}
|
||||
|
||||
export async function emitCommit(args: {
|
||||
|
|
@ -59,8 +68,7 @@ export async function emitCommit(args: {
|
|||
files: string[];
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "commit", ...args });
|
||||
await emitEvent({ type: "commit", ...args });
|
||||
}
|
||||
|
||||
export async function emitBeforePush(args: {
|
||||
|
|
@ -68,11 +76,7 @@ export async function emitBeforePush(args: {
|
|||
branch: string;
|
||||
cwd: string;
|
||||
}): Promise<BeforePushEventResult | undefined> {
|
||||
if (!_pi) return undefined;
|
||||
return (await _pi.emitExtensionEvent({
|
||||
type: "before_push",
|
||||
...args,
|
||||
})) as BeforePushEventResult | undefined;
|
||||
return (await emitEvent({ type: "before_push", ...args })) as BeforePushEventResult | undefined;
|
||||
}
|
||||
|
||||
export async function emitPush(args: {
|
||||
|
|
@ -80,8 +84,7 @@ export async function emitPush(args: {
|
|||
branch: string;
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "push", ...args });
|
||||
await emitEvent({ type: "push", ...args });
|
||||
}
|
||||
|
||||
export async function emitBeforePr(args: {
|
||||
|
|
@ -91,11 +94,7 @@ export async function emitBeforePr(args: {
|
|||
body: string;
|
||||
cwd: string;
|
||||
}): Promise<BeforePrEventResult | undefined> {
|
||||
if (!_pi) return undefined;
|
||||
return (await _pi.emitExtensionEvent({
|
||||
type: "before_pr",
|
||||
...args,
|
||||
})) as BeforePrEventResult | undefined;
|
||||
return (await emitEvent({ type: "before_pr", ...args })) as BeforePrEventResult | undefined;
|
||||
}
|
||||
|
||||
export async function emitPrOpened(args: {
|
||||
|
|
@ -104,8 +103,7 @@ export async function emitPrOpened(args: {
|
|||
targetBranch: string;
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "pr_opened", ...args });
|
||||
await emitEvent({ type: "pr_opened", ...args });
|
||||
}
|
||||
|
||||
// ─── Verification ──────────────────────────────────────────────────────────
|
||||
|
|
@ -115,11 +113,7 @@ export async function emitBeforeVerify(args: {
|
|||
unitId?: string;
|
||||
cwd: string;
|
||||
}): Promise<BeforeVerifyEventResult | undefined> {
|
||||
if (!_pi) return undefined;
|
||||
return (await _pi.emitExtensionEvent({
|
||||
type: "before_verify",
|
||||
...args,
|
||||
})) as BeforeVerifyEventResult | undefined;
|
||||
return (await emitEvent({ type: "before_verify", ...args })) as BeforeVerifyEventResult | undefined;
|
||||
}
|
||||
|
||||
export async function emitVerifyResult(args: {
|
||||
|
|
@ -129,8 +123,7 @@ export async function emitVerifyResult(args: {
|
|||
unitId?: string;
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "verify_result", ...args });
|
||||
await emitEvent({ type: "verify_result", ...args });
|
||||
}
|
||||
|
||||
// ─── Budget ────────────────────────────────────────────────────────────────
|
||||
|
|
@ -140,8 +133,7 @@ export async function emitBudgetThreshold(args: {
|
|||
spent: number;
|
||||
limit: number;
|
||||
}): Promise<BudgetThresholdEventResult | undefined> {
|
||||
if (!_pi) return undefined;
|
||||
return (await _pi.emitExtensionEvent({
|
||||
return (await emitEvent({
|
||||
type: "budget_threshold",
|
||||
fraction: args.fraction,
|
||||
spent: args.spent,
|
||||
|
|
@ -157,8 +149,7 @@ export async function emitMilestoneStart(args: {
|
|||
title?: string;
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "milestone_start", ...args });
|
||||
await emitEvent({ type: "milestone_start", ...args });
|
||||
}
|
||||
|
||||
export async function emitMilestoneEnd(args: {
|
||||
|
|
@ -166,8 +157,7 @@ export async function emitMilestoneEnd(args: {
|
|||
status: "completed" | "failed" | "cancelled";
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "milestone_end", ...args });
|
||||
await emitEvent({ type: "milestone_end", ...args });
|
||||
}
|
||||
|
||||
export async function emitUnitStart(args: {
|
||||
|
|
@ -176,8 +166,7 @@ export async function emitUnitStart(args: {
|
|||
milestoneId?: string;
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "unit_start", ...args });
|
||||
await emitEvent({ type: "unit_start", ...args });
|
||||
}
|
||||
|
||||
export async function emitUnitEnd(args: {
|
||||
|
|
@ -187,6 +176,5 @@ export async function emitUnitEnd(args: {
|
|||
status: "completed" | "failed" | "cancelled" | "blocked";
|
||||
cwd: string;
|
||||
}): Promise<void> {
|
||||
if (!_pi) return;
|
||||
await _pi.emitExtensionEvent({ type: "unit_end", ...args });
|
||||
await emitEvent({ type: "unit_end", ...args });
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,8 +7,7 @@ import {
|
|||
nextMilestoneId,
|
||||
reserveMilestoneId,
|
||||
} from "./milestone-ids.js";
|
||||
import { sfRoot } from "./paths.js";
|
||||
import { resolveMilestoneFile } from "./paths.js";
|
||||
import { sfRoot, resolveMilestoneFile } from "./paths.js";
|
||||
|
||||
/**
|
||||
* A milestone is "reusable ghost" if it has no DB row, no worktree, and no
|
||||
|
|
|
|||
|
|
@ -213,7 +213,7 @@ test("detectProjectSignals: Rust project", (t) => {
|
|||
assert.ok(
|
||||
signals.verificationCommands.includes("cargo test -- --test-threads=2"),
|
||||
);
|
||||
assert.ok(signals.verificationCommands.includes("cargo clippy"));
|
||||
assert.ok(signals.verificationCommands.includes("cargo clippy -- -D warnings"));
|
||||
});
|
||||
|
||||
test("detectProjectSignals: Go project", (t) => {
|
||||
|
|
|
|||
|
|
@ -9,16 +9,16 @@ import {
|
|||
import { join } from "node:path";
|
||||
import { isStaleWrite } from "../auto/turn-epoch.js";
|
||||
import { withFileLockSync } from "../file-lock.js";
|
||||
import { sfRoot } from "../paths.js";
|
||||
import { sfRuntimeRoot } from "../paths.js";
|
||||
import { insertAuditEvent, isDbAvailable } from "../sf-db.js";
|
||||
import type { AuditEventEnvelope } from "./contracts.js";
|
||||
|
||||
function auditLogPath(basePath: string): string {
|
||||
return join(sfRoot(basePath), "audit", "events.jsonl");
|
||||
return join(sfRuntimeRoot(basePath), "audit", "events.jsonl");
|
||||
}
|
||||
|
||||
function ensureAuditDir(basePath: string): void {
|
||||
mkdirSync(join(sfRoot(basePath), "audit"), { recursive: true });
|
||||
mkdirSync(join(sfRuntimeRoot(basePath), "audit"), { recursive: true });
|
||||
}
|
||||
|
||||
export function buildAuditEnvelope(args: {
|
||||
|
|
|
|||
|
|
@ -445,7 +445,7 @@ export class WorktreeResolver {
|
|||
getMilestoneResquash(prefs)
|
||||
) {
|
||||
const result = resquashMilestoneOnMain(
|
||||
this.s.originalBasePath || this.s.basePath,
|
||||
projectRoot,
|
||||
milestoneId,
|
||||
startSha,
|
||||
);
|
||||
|
|
@ -472,7 +472,7 @@ export class WorktreeResolver {
|
|||
// no-merge paths returned above.
|
||||
try {
|
||||
emitWorktreeMerged(
|
||||
this.s.originalBasePath || this.s.basePath,
|
||||
projectRoot,
|
||||
milestoneId,
|
||||
{
|
||||
reason: "milestone-complete",
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue